var/home/core/zuul-output/
var/home/core/zuul-output/logs/
var/home/core/zuul-output/logs/kubelet.log.gz
[binary gzip payload of kubelet.log.gz omitted — compressed data, not recoverable as text]
?jۜV&2cz-`"e⡄g\Dd$[8dd4VF"w\PJx7x[3Ξ^/mpYeC Kլؕ{mooZJRuA`Ï'1'kQ'+ĠfAiRmͬR Օu1WLlDYTՎчm<]|m6t.->#K Y=l=c5xbsBETR{imÕB3ʮq(KTޅeU 釳Oib53 OO?ŧƧK*4.+8+nYo흻A71fYbg'b=}яU wGHq$d䊝|0ZJ.Éӥ9O/ORVkQD*.0K%{2L[d"|_zPL4m[/CGSeQ ԫtӗޕA~m?Zyvv:Z YIJ?> [EUD+-_<(}y$Qwgݛ^z_+?^]OgxL̾5*\?.|ڶ޶"Qo0z=}$^S[4n? ~I3\klF4WDc5˝- ژK+Y|87΋ˋɺ:[`k?dS hE㲉cTc8 q s)?t?SPM}ia^:go^7W߿՛۟_ьgjI ɄՄ=ma7U6yaͧ5" ~3|h}˵)@RzǫA8"_Eꝟt;k"( #d6 ˩ŸfEVUXCwe#ff~Po@e".iMMى>= 鋕ȕk*xCC]VZ5̹Yy;u 绎s"mM / N%L IfUdRc #7>e8Y?9lA$19F#j,Ѐp)D0s.,ʥN=z=Ցg8A*sz,xo\E&Ì__٬snpip:ax 0'`gٻ6rdWgx 038fXlRȒG8[˶n)[N:H[MEvWdʃL5 ڔ-947^SZv.C@O~OOۉ?@H_,:{uGP*Mx"F:>Tcь6VA<}8Vy uMPD|P"DZ;N]{~X>s.QQb*&֖iTq%HHs:g d1tSdzdy~q*j6~M;K0N]5b͝fövX;BTX3nِ|*ev?fb@qt%Ǒ]bډ@ ?Gra UOnIZosI$㭳NG'jM D 9Xi}8Q"rM3 *[I+^KE!$)uP)ln7QN;8uޏ/X U<eCFIKxq&M@J:KyA{ehθOCO|rYmQڛ\'>z#ct"sҳ䓲&pFf%rǃ}aqstD}kROL&P63y9WKG>q3;}N6J_.]%zGdvW.;zvd|?,;]MEӸਁ@]TDxN7Zـg >etd\G@f@D = %Z۳(.YR*{6C*GblrZq 9jf@;D]J$BRQ:eBbe@E"Ykc!'6LfQ8)}:&J.mx3&ͼlߓ4n}B,h"Q>[8\;/lѷ?CY$ӲL&H͙0][בn_e:{!3 :'ȉ>+ "*U%A@:3K< Xሣ#2 Ye 0EIR6 &vD8~>.cGzէnǍA<'ƑA&2cz>h"$EkH̖SqL*WɈ\xM*w޾Yo5_lӁsY5wX o^ʾ+ʗ@[ &_x-N{+%4˹/?~kr" ˬRӍ)7Llt{1UڻWb]8]ѮQ. =/^YŶ3 _0$~]uĬ 8 5[JeP[Bbb_er-4ޟ\A7 OfY`y3SH#'O] z!YrK@٘bSL^s2/Ct}zr121{(S s;tcIt2~w$ޑ--Yʕ-ZffX̲Ip0lFbYŇ|Ƿm.{MSvmmnuu+bc/ZC ϑҰ"pKc<g A؆J5M+ Ob/Gy󯿔~?_~|wVL0HDX}Eד  }Zj0i+-`8?uџ6w={81qgZI{dN%fT8׵#?m8I9] 6dm5 <+qeoE_K5]|}Ƙ2Dy47N)9:8fU@(szsd=r:D8"]q 00-0X漷 ";L:\֙X,(H&$zs !V#k~yhSukkJl^r5tgxC1KɄgBU6 pšMs'/I!ҡ@S^jX$reK"ZL4O .吶xwd*$椦o](C{uHL/1"3\&â2v Hi1LF DR^{2,`x 7*츑!ʥpܹauhR߷=^qG9m֪NۃAC8`VܻpN*aw^_ Ucp`rU *o)̢A=>GiuOkϽؑ{R_)13e>g$Wc։ SY:#:-@ gLy w7pʸ}E٥ 8r];]]DcXkz!q>H HVAP))޳2:Lt@?`Afo*Ye28l |6h͹ J8T90W{9~70eEz;"a)UwY$GX3AmbzINa6ҧbJ 'w'ˬr˻长WŘ^/ V8I'<fV|ìG 7ZY+Ekb,+2)Ђγv!eΚb,]RV^h5e酲=.Zlkf\n,Mˇ~,hҵ7ņ}iя˗wk3ZGl<:h/{.|h W*L7{hNߌh4>ϮkSɯ4.V_O ж~ ev) Qӧd&y@#w<6{B vx)ݎx9n :nDt5,A),x"1p]ƀ 0wL$ :9|fmc@d4 61Ts!>C}`F9,Z\̗9Đa?F{Tڸ|+mZ}GuOJ^GbvLKo91H{*0/Mh('+'Z'ZiE{<^;}; Wde7pU*ru*V =++.GpŅ`7pUpEV3\BBr{W`-{WUv bW W(4#"*r*b`R W W!կ;[ܴ;\ӳ[ ²j7J_3g[#= uΗ@b𖛷ҾQ7L/޻m-/OچvnQe*^M }uh6M_%Q/ftLf9V#=b(ms?aqΦ8,N׍=OCCpFhy81b#g:+B$?]]EVn@mp,beM{->/.o@XmݭŸ|/?|DF:h`n00xm> :z7VC/ONTz5` w2YNrc!:(|k9Y ͬBUu17EF8>hz@} ?>uh#݉ERJ3)cC # .@b@Ĥ}Fi0 X3#Y 7Z~RہBT=+2X2* \k:\+•&Rɻ"MbUJzpeP fd1Z̕/pUUz^#\YJ[b.UVURW \#0ɥ~yzti]=c]p4+M2S p\ %p#*+*Z"k9UrcC`W[q=b0U1Wa_XkxXiW4S_Oy4lv1?-ߝq϶xϝUOo*L47MF5}'F'O~=x`Z9zF&aa$<2޻pdդԬooϭ(gG3fg3NNOnp=KϚXm91]aV:VnZ",QAtD+.5ޮ ֚/Fմ]V;֯Z}e);f{ >eOiK &!CtVZ#BFt 1h2PRXkq.9ζiqz]]]_fo~Mt׈ۢN:IW_nOzhmj{ps瓕I hY+|^n {N],)~3*_4@CzCs9Tv(tYݗ^{;=M&ttťk& X4)bN("/ȭkhEI n >q/8`;>߽zC.ٜ7hs4!UʨGe7 'r~߉J'RޙTTȳQ;c M%^hϹȁ(a\¨ YA 6$%br<@Hih.0Aȳ9OtV9ixbR;6^,fۨ^W$E˯w>^.()zv2iL^%~յ>fho-׏|;uӘI@= Σ'`5)#T@dUQc,emK[NГB)bcGNQ@R9#c=_{b!up> ]^tIˋmM&)=3bs- ] S!3R':6 dBs' JrWnpaDFz ș ^`S(zQsu&nnjmBrHm*#v5sFl? Ԯ6:EmQڃ{߈Z[pe 5YV#jQmQ+m- B m6T,v!gH,: kb7AHE/8d1I;W0~)Rn: "V]AD1  x"W|!U!͙%b"τʙl.+–͖cJ#9!I5\pG &hLĤ9j4`( n2aD>O՞pqѬ&WbFɾ+pq#ZD!1 "qUNBJ"%rˢR d.4:CS~xx[QP2}lpr0+_QIH_(M]蕹OH1mh<5 17 2d\ C!\62nYT?tR9TS"Z1 3HEdVh;& c+JĀ\-xkb&0dC OzБ\o| Z<;%I=:Mg)TC7(歪o5Ϟ\>6%yzmٽ8 yGB YuvS`hpS˱ J.{BCphpq'~oo]'ism7[ԽKRKelM蹤y<@0XpcD;WyuJ>~i%ʴeL!YgE/ȵY&cy)h9(QYey((b:RdYJ_ح*c&@1&d?\+Ds{kp. u)ʴ牮M͂F|{0fB#d#b-"E$X+" YA$ 91[kWO?8[ELɉ k m x̀ʎ 4KAdel"g#8o)ym U9hBQ"DάE-QJ7Pq)AV"PU3aF8/Ǐ韶zVՎTnܼk?)/%j'@xdƢ\d4WAFsθ 0yLFyVi3fv( lړ h=g#Zj={f!|Ϊƺ.rMHw^ܿb|! 
;ݐsNj O&l.B18%ϪtلR癝Y2Iٻ6$U'\FCq*u/ pXC3E|8Vݯzf(rH1` UUϙ.[a0nYf$}|w@4N3v|;Ҽ+CBI-DqS= Qg8 D0!FCwOM~9_.]G`7}\2:Yۜ@nNml]"ࣃ12\OGxfTS{XΤyJ.F0+L5֕k-*ׄQ qɌ4TpGO1/p& ͑ #I 5 55]k|xҿ*L fiڅocK"76Iԧ|dtjń.Ɵ˷779Jg7f Û<7izo|ƫ[f~ڛ?\v9mx~7ijj W]u2,cnn77wisb%`au#Xj7n?Vyq%-񰟏frɴB0r)>I3"k3CQ0ypoos @WqO#=^N!Qck\>\g !&)^9/O\"XARG"`"RSFDD b FрG!eLD7<jm_G{ 'JL{͍<?cj:\^X8؁z̾4-ְ o;ߙ~dQAx72Le}oBY4Qy;l)Kj͹v'Pww'=ڮ^SfYnphr}Ńdth|QD&ZJa01ʘ@|ჅIPFD RrȢ> $^iSf%HD0 AP$@2qVPy, C2cKXc 6΢1n$ub؄lvXL<8H"Q`zVw(?ۏ5mUS$4!B!$#&bgԌ9FU}Nw67ª [WK\Tָɫxu22.|{=LO~3=?OߚֺT@.&wUefa%?g N("{gv Ogkd}MR"Dv1&9RP;1IIz6 `UE85iatHp7Zazbu8( |6/ǡ230C^ӏ:BorjUFjks$Љ-r4x1q4}0X¨46Ƙb_ W/7ŃWe*0MZ-2_z},ܮ8plZA-&/Bw${wIm7ևO0i.>=YٻM7 ^ |u֌dc/z01pRӳIȆ1KjlXtkFE/^Өl7(g7v߼N{ux&޼ip) E ++ E{p뿟}ג657*MתQWE59~ol}U.ٞ[lQ>iߛty=/] q/> Wen$sUU[  |Byb& =R _K}qhFhRNxuZ>1(NhRS9}y Cp`;GJQ&:frp8+P?1>oQ@ cW+\f":%1),`MtkF&tݱ=#sg#GV|AY+Mı5K$b,C`(,k'F&a:ɾC" %d2AH5WkN]ǹkʼnB|qj@ .ytқ[{5N ?/4Sn4y5'JQoU۠iނe%c,oN7t|o眸e?[@ vڵfogz츫=Y^mz)7,V-ݷYxiE_KNJC-@ ;h22@`nS9cFVQ+H ]sL3F6'$}5-ѥOjϐE−QzQq)kA]E D:Ɔg$=ը `ѪL΃cdMEv߳˪6%<,ޟtЭ/@ӒKDx@r\ I!.4z5hx&`;pxbN1t$X1*8I-:80"9::@YtJye6|+'[wNSʥ%<$. any)JiZ#+S"D4G!:XSa1Zpaj2 v N7PxQz~'!ԄO\t xP?8r Le`D!Rf!dJX$J` :]:5ޔ_;#7'f6$o۪ +QֈMud|>֢) L/LBA^$4Z.f+xs{+QT/V~mUf IV-M:@ UsM2NS'ɀl u4~,}%@p a57TGb&1ȥZXÆk(:T.<0))Z[Rs7cjcILAυnΠCQ{JCJbƃ5vA)ıH1WOpÖucoYߍni ̳#MAs׌SX|<Êbc *)XfB124v3K`\Hv tb ֑.p@U?GJZSxMOozB]O%8. %U4. *"UE"JFc"MKc5o 1YZFB)^;2Ǣ ,6hX:, F]BPXJY; 6h%{gHq)ov 5F6 ܦ x7ȼ(x 8<ތ}}!AMǾ? ~t."H | ` ~t8qp:GJ8^2*b8]ܛp5+ 67D.$9J;"AME=E-#ZcQ`,jQPAq#VXDhˢ $yHB`Ip=鲨E+H^@hJr1$L mknw|nf*6C [6xΤpTR_36X",h- 1%}I;ƚǣ1VYXhm cet` F刊`) NP}t4})r;qW pkϞ|=kh%7eH%޲aXY[P]cJKᖪ+vuTP"n;Q}*6P嵆j\|nTR,ɻ0mMfw١HІ&HI̛eiG?D0cO&0=S LO D]`3 LBkvC_Qț~qWrMᗅA76g޳q2G6c-#rL++9ly$vGqԓ~~ Z eoCL=v6~}pX"EbUVp0wT uV81VE5#Ig0H: WR{ͻ" ,{]acoPKoGgYy1*Zz}Q}f˰iQ|gBJ!7QS*SAT@-E*Qi!r~+UduF˾Xtazw;v^n,T +\¶w̬ͭx=\A܅'5_)( ?\_oi2ll.f9b\Oڍxſp%1ZIZISH#l} [Gzf.d M'w:36XjH1=kHS۷6ƍ̀FjMZ{@YAӬMF]h]Mwm\Gs,^͛C{41}:6v}̓jޟsfOl4kyju!2/\ K/Qkw4 ˶JVxv.YsZ/@"fv68gfsώ9y۴13>7'6VVXg+gnOv~t1V'ؤ0x-6utP{̟s޶<Κabfu9O[.o M|)f#JrơmϭnxؿS C0$hFCbeVsipbT9r!cXNe"3Ka__MwIއsyx2)$(%CGg)P-sBV,-ۃ^#0j#ѯ*l K^]ݗ٦{}!~I¼ I{WR;+} pK @P͆bF|C9 ؉6dK Q3c"ʀ>% HI:STqJr]t_A{yf `s=]{ߴY+EGRF<ԫ(V)qyLyKNAj#%o lafuEN=g $pTkdPܡS@\i5RgyWd$ެ}:zȎ(}d:T S<v6~&JɏKhRyh"U@Dd0O-yCCȃӆBHz!dvWCB04(#V DDpG \F^u p*@hTJb+/E%y T_Xo](5 I! 9RF}:%*EKZy=:guqunuI/P["/Zl9cfsazm=fSYV~ pZXxAZW+@+Z}mgg"3΁MI{c1 9f)${ rBT$iHpVV_8%3xC-@rR!Db:Q$%,wVz=S_3gU[!_dmx  w&>$vy6U2aU:Eg!0ܸ AmSl %ͿEp,£h%7FPeZJE2gB-iYM4ֹiw~bZ [ϧCI+:X☜zvNúZdS+UݥN1!Ii'gg`D)F&RO% "WA+N1=y[ BN1YϢhRNCTT,#\B*ٍJ9,,52VѽJ̌;Ĥ2Bݶ٧YhsʍFoxafZ"B"NHVX8qJc#m⊄Hsґ4zepÓnpg6A DF0#&&,RD69Oq%<.6:Em^Y=ݪNhb~ @QA[cl?J[Kל'-[g)I]9F (:pЈ50G(d} hWZ9ZsAD#b[%IdJ,஢ `4#Ӥ !dr5Q9 qrb6E@o)ZJIHĔG-X8FE5FLh4!'ް_ʜ݈x=lQ:CqQEbWAGH$J*(^ 1ԒhAҐw0$OYht0TSN#woYNhw'ُ7Oُm37P9R3 LuQͦe]?w9Eg7!aW䚹 w駣 L? 
p\`Y+i]8J4f0G-Seu-V}:/c{BLC \ x9mO]O<>4?.P౧8SТ}] $f4~(+\8:hp3g ^A#0u aonx"rՊb\gH#ْPԊ=tUhһ/Qv(@~"gcYk=lrB#!$qFsLYb"HeVk*:B0\9$x񕱎Ta0zR*"Q< ӵ=X`4=:(.hvW< _a Yed4:/gկ tD‰n0$䓲8ä"%U'1BDC*o*iRS^o"'A)MV'qHGD $P#h4"J½#szk"W F{໫0\ϊ2J Ïtlع~1{f]eqZ4̗7$?]~gW(׮QlV@2/"N& 'ZqNytѧFj-~ERkm+Go"Ylz2= Lc] FkbDɹoX%ۺX,9Hl:4Y)"y>כoFn1?.nu4n4컪Ayx]ŧeci<4B_CٕHW{m?~[_Ow ӏrkhn+ޛU)jrVî7 rb޵q։5pFվI-iZx=_>{.8NKbX?܎rwcIt6 |eUyK7A ?ޒbuKorU3ru3ZlfQYIp':u g=kz]4lU:}Q}yz\gs6I}>ZI>j6q6=OTjԐNmeg_~~w/NH|o/F:ǚh |>CM߷oڪVMS{6Mc^O|vZvyE-}wosf/|&R/Iga!Au=[5oPr=5,Ħi~y{.t19*:~O*ꀾSx:z3ۑ;*SW=_-_,]k^='xF xb56*LvJ1Ì;=wܿ3z1"=rGT:Z'}FO>c4+I_]T +LBLhH+80% S%jB|Y; C|gdfY7oqwȓDC>R~!MrkH>^!ER| `KrPM&ձ6ӉdiAvƴ1ԑLjOWƈt} uڢ?u |0?qw *Y!:$aT|Ȫ|tyL{;;?'SʞOB ;.@D0'Lhah)4@3NG$r>'^@1@ [(pE[l8}+tLL{;^[٥g̫rPMh?%q)@z/=9 qBr{>K/OٗͧAer W,(\Z&APO7|X)^*-Q.+/$ ,n)*r 221:5ABQ&6D9@Y4K_xrckL lxϮ`/Ͳ m;28ފZEA˗2F\6h&$fgx1$0YAtPt޾vi\+d?r'oH bTQ4-zdC2^UH%Ql_}P]͕OxoQB07{]] '/o-o'X^\(^\_rrϵ ץºyno~ XD7}.yH4af\Դe-mAœvי9=1Ꜽx=y拉{ ٿr%--8Ȯƣold:O0_kE.IA% #H @.J`HS MAf* LD{OSNjh֟Ec =7r8kOĵ}y2T_;c7]W:vːsw ׏,V'ĬXvìH.OYJ+ܱ3+V)@fEs:SƃJ1]r sdkcMM[=cȣ .G\"#gW @Ol1T՘ yQd }pI&bN$|`ΞLf1c&mj:4؄F e)Po{ NVah.g-O){EhScL5wȋRǐ幙yiw̢vʪU[츷lg3v|@F:؃6C鲬razZBE9PvZƁqqQKt$DG &0]-@Ňh`Ef@Ī"$$>+aեr_ ѱQ${7֠u$QR {D)Dq\L.DdKQm_Ji9u\aȞ%",}Ik,Wn\3q6ƑfK5p_B:Vi1^ MɊ[܍FZ"͇ࣇơT{(S%D``e5Yucͬe򽈣{G"Z}*IJF!L9De 9h%aM{|/X6?a/ç2*} V(~U:v+Z_u#NސKxSiFi8 ˬVahvRIF%gv>ʧ[ԼPr>G5͞6:U9|C~bz]M97eϘ%θ+Aru[7Vh88 a~(4iQiRjq\ Ҳgգno% <x4`W?̂KȬ4lVQ LaN;̀ϞuNހU}x6WlH.vno)HHn^׏-Xݼ9A6q|{H"x._Ю̽cF)skFN0T0<)UVD-Q] vqΕPR 2 4FºV$X8]cPJgԎ Oٴ9󣉇g!WI.y.f]q;{݁M9ޟx-g>;rl#s,&J^N҃uYk 5sP.lsI].=:c=ȑ,ժD(mLJhƣ] aiFdO! `6v8 :;#6#!8xm:8-p>p(bUσ$C(qUuu50Ijq%򱅵 k(F uB̃F+zLSA6դ'$F"͔R"t2 D$(Fk$^+ D# BHdP`%sĤ{mp@cꃴq|}J o,]U#q_RT#j16.:?Y)8``{7V[R(UR@/us =䗐p>!E@ɪWQ x9HW!V"DpOSwh"a1xv:5*xUE-6`M@ZX.*,&7]3r%[׷v' i"82`*33llJ%RQ.[zӳrQL#]˦CTilh9؉S0ar|kcΪS~-CuTO>ޜOOh1g[jn۹ Q6 ?O-~k{b|qOg˺!˻di7 ևO0i DW}Uji^cz-FFhd$ú}:yQ̒C ~QЊFoUca8s)@7o:DOހ4T3g s. lDp뿽ؾkIkZu9z[[Ր{K|]rD7C_y\[^ + q?>Wl*%_dn7+pgT!|N1L,TńߧW&WWg;I*K8+/R^ $4)Ɉ;m)9P] {'顣 Kc#횛:9k 8`b,:7xfpyO|赓MN6u0=oȾKsh'P0廵<|=öΎ(/i1ӴW% #EZIiJ+r3y5g!? Kn"'Jќdsk,Q-h zz;zz/.h}ZnTˆa*xX)@t ^nuZhj!x^g[ԭXQm>}r'1&h^gCG>XvNSs󎒹5C(\Qs"'swyrɎ=$C֖"Rs-2GC mIL$XJ<u ChㅑJ Pc|J!LE8"cH9wC&< rGS$ AbMyU !b"wQ($#CI$oC쎤h0H7ijiX#o`!y$(T,^w ԛt#2\BXEv%V! 
"JFc"MKc5` !\[v5Yt#HEcpCZ"q1X7ELk+"X.Ic2C26 Uώ"-k/h}hk8>R8[.W; G"'$(TΘUQ!5i}]|޵nT<|Oj'#%izQq u!7!Ա468 :m#sQڡ-qfy#]]so[ W4ObOu[pe ӒKtMR.ԜKV#7DX34z5hx&`a+!r{ŰW  qԒCYˣ47am-ǒ½|f)hѹ|sohӯg4LH++o'28^1?PkA~jwV8hQPAq#VXDhˢ $na ȍh$łbd8w[83bOp1v[,k*,k9i}(MЯ{,:pA6ϙ\5O 13ZQF1myQ Uh4BkM,sglrD`p'ˋvEg=9FӸٓ\n?rOQ:+gH%2}aRm^6_\tQ2#$XȜyskXSIwٖ wۅ\Iڮ wȅĻB&qU9PjlmyG795!GyDm  tYETGCJJ0b@wsΚ%{-{ 0,$(xq 3rڜ..AN䜞`87W=^8tBU*atc;3f `Jz #UP3ZgGt#(ˍfoF _~N瓰T>ûͨ[m\Xa rQR& R _#g_DfG><qAYc>HVCh) <[$ׁw4x$ϦR~urU5_'aAPl\>L'âRGA*p>1iDY7{pcv9:HOF'Y W_k/=kR=&ntv6T&%@o`ә߆%5ĔZdO.#46tK ayΠFae"sџ$t=xeLNׅUmNUBYr%5(XIڒ~UvI|k9G ~\-@)6~j)+h3(AmFI:&Pd㔹4Mr0gYaٟd6;{g)ۅSw 2,WJᯠލ1Ƕngkmł ö{Uv"|6[a279vP'PgңOЃJ&A6B>ibQ*8KW+7E(~JX,b\ K*X d bT`j,FZ>ޅnFj.࠭IQN)SbcM0&%}4h ,@$a6NӺ!X+z{ѝBm"$Lo0省kZ00m(zq_Yt7M [ [요 YinUR>h;sV+o~ˁJz^BÎ5\(|f");ںm^mDt4]ץIwuw>7,2$Ka2% V$mג%2ItI4ʹ(@Zwv{u:猅4 +FPXXΑx57w17{Q\a6\jr~y&mJ.$!z\pp[XJŲy#"m{F"r3,bK|Gqݻ0e4x?+\qY `R%W90z) 3vmstcCa2_0ܮb#))BǩzgS&L{Y_d5uoGL7YȰH,hQ6H4(N}B5`LbM"w.!wkhy>jkJ>%&h)W-WWWWWWYY`b`;a3 n`,2։r$9 ~deəv"YUXjS٩Tmv6;U{WR`kܶ٩ZNfjS٩Tmv6;UD*Eǫ_nuև TCə^wGםQ'pb .ikHM_nwc d 6 {9Ǎ4Xb#h|"$Ղw[ DHFj@ꍯu R2Be3jzbBQ)t;~VйL6y#Gav,'s٧zҏsٿ/keoB}g+꼤3ʷ?wPbj`.8A%flP (lP5i6:Nۥdh!Q ֘An+bS2EW-,Шb2Z)Cb*!{ "tJTKZyg퉳j9;@\UՅiVu4VW_ϪHɜO:57EU Ck>x|c_` -\ewh @;X +pG*j]/1[hwŪ'y{z~IhSX4(a`#pPa`T2I'F~u.F%3xCmR!"eX(Mw;+Ҟ~#g*ͿNֻo-宂LƢ Q)L `hXiULehDlD[FT$L :h£h%7FP%x jcl)_,Khr*Ud&Lys[5^~} sժ$'WofqGU ɳ -ec?V_Ma,;Xa&wVOz\;LK.R!8N6RHH"(cN:f]l7޸I4dc#{YV\頄W4vĄªb"A5vclؾ_‚y*Z1Uk6kv`R'wPQq<ڨV18^ikIdQ}.x -dDTxԨk LPQG/$1&j ٬+5\^vOE#6y[9$b! lF3b0M0%ʙlH./&oDf%(-D)8IiA QŜ:ВfBs9+iMY#_G\jzqXL|qɮzQ7e[ޣ h0@RK\ ZJ$ qJFCw6CӰ>ԻYvɚQȍ7q Ta7#E?*&8EYԧ("m(4j0NF!Ra1D>Wx%Pb唵Q\pY;3^{P?4h*Ȩ NsDphOe9SYxG&g4/O MI*tb3bUrͼ Sڻ'36pUwDrn[tt$tuã(BKW 4N f /#γ$U ^ID ˹ȚD!ZgP Ks`K,&@e "JR%RPD eeB".%eܺ$I* ZI !#zj1!15cZik:)r6CZ юǵCZ;by;Z8oΏ> [s$cZH@"R},ݤRT1A˅VcBaHqW GW 8N>#  BΤAY*cܚZ\*hTAԊ;&[pyD $(Sx9si._nm}\=8cMbI PK.KyɁ3e-$4jR-W@Pȅs2`4Lj!9S1 ADԅmlNSQac_WgAo-+n祻V{={!(_/n+h+ X/ri@`@ eJ,e Rrmɑd4ձV5W6$@kk9P9 |ۺ㆒k:7mF^w"Y|lv}WI@ǡv闱ɴ{ LWX} lS*zl\f]&  Y?u*)S_F9~RWʖvXLy/,r..ayi;,JhH9pVA*^B}6e>B,*?2#>78"RL5lHzh _z+ٶΘ#9cR6,{8I';xlR$MAVy#^ '{ڂM(S [ϫ/b brj:p5Gnu2 +cD!ENW!8p-;-DO=K4.AM)*PD"2$&&q^k2ٖ]j,O}Jj36.}a?|L]q2Ztn!/o]?]^]_v>mx~BK9i|CM|y" Ά&֜7%毙d~Z()+˭͏dK*Uyk ͳ^qse,"Z}4.ROU*D’м9BqOE^SIP:3! s6@@?1Nj\byW i!S!A[.JYŜ`1%b_Tgϗe:Ԣu2~=ǽ|ΰ>a7y+o^<雖/ƹ}<_pnW0iMZnWv5d)Ay 8Ul1qJ7m£0Tc+~h䲹JLoT|2VnEny]7-8O5mEBl1`[ۭSMzib4i܏28ؾ>C{ E]jL(N4Hps!)H Hn^*F?P{ߠ G3٪*GaF2Ò 6ykQB"S⣡ᏩP |Ҡᨩ2 TwϹ "-M` 4]r1rtd5fuϢ癒n(pkBCsWw4?ڮYfYS;krtYFƐT`rLYPmZy缥J#Dp! sI^_#O4S` ȴ< U CJBL|jP2Kjs,IRy S,'@jONKXugIJ*h)nB>); .&;HTV9XbDm(wZ[6}NH="4Қ9M0&(Kjpz@24Q(Zu+H NEmExQ)MVx]J9D퓐FLʭڮKLޅ٫yuxBEqySB qϿ٧. ʴJvّ<~5hd//a$얜}ҊsÙ= Gobg"R@H.ԁhWͮ WbWb9R*Qq6;*'ݪDCUJzZ?-8.U+%$:.?:rKgC/wO\QyR0jM;KccOՃww/.sbfN̍-Sz?ջ(3W;q30gu$,$}/ރg]ּi܈m6Mz2[kDC7_ʘٖ[ ;}{WaA1j>hw3꺄L?d IUyB@ @B b3=R*i~OJBXi}Tm}&^|zۊ߹8ƀQ8ITXR0Z0TQSȷ] QZ.Fk{:=hrŸ$x]T,(wj~NYun D@@g: =iPgGw{om K Ēlf@ّ?x,w99_'+pU x`@coD&)YkJL-S^"3S$-))eֵjKQ-YE E. wHr-zL"9@mpX`$ (پhd._%ydY56eKvOQçȪzDqHQ}>/ʕ&3h_P 3BA-@;9$4<ǜػ^; \S~gɭܝoN'=\]Y;pxq8oKu'\gmR/ltk0xF>̔%vՖf< hUs&[ğk `!ꮗU.A$6j1OB G t~2%ips[뱃ϸ *{ Ze'Ģk41(B2D MA-ٯu_Y x/N=g@<:NYZU\$>"F0Jccِt9YAJ)ݟ(>wq'ό?=ZS# 1HILHhHhbN&O5CJ;F}Qɰ{4zZlġ:Mgw2lėZxn7r /Ejlv̶D ij1 L l@P*tO"0 WHwVB[#WYMd)kwyl/稏'ͨ/VD+*Gkl'g2rz?.B,Qpjg\ƹ~OuOWdѿoH}'ߏQ[i}-wŦR+,yZ􂻑'}k<'oW<||?\Gos(Vͻ6ohf̓pQOicn7M*vcŚ7I[Ojd_Ҭ ^o"F&0 .!>bo~xCCJt2[0^F()w=yft]cq7TjnS;]ه9[OqJj4 h+-svwb33UsLt[4Ͷvah]xE:O,rR^-X N JF(S!Q,%Y1"+9,K4YY 0F{I _w݀`Ii 2S2. rJ?Ae)ܮ޸ݛ{7S; ]J#I[2eD&JvSPB+="#Ž#Ɂ[bsY\pZN~[HZo%j-4u nmt%R! 
f2~dУ]UyH_^bUEkI3$ ) )2[C6K)' A6m-i)B)!1A/cJ9;'8p $g('Wۛu~}'yUܙK7^@(~Q$-@0cw(a%P, U) ɔަ Hٟ#ca,hbfR^Cz>E 1)Dm;XPW cUD @p`릪+`O[8p3^eL7 W*}pEKx^Dmp@Q'vS0$,3Vs@4=cy&Ze/Zu-۰~1hX|@P+L"\U||ҧ/Ihg?&kCѽޛlV7F29}yjoG3sX^]kD)t+ ?»כsV4&ћX-q<^or~\}9F*$QmuoqGVV:4;W) S.f9֫b(ߋz:̴zi4 櫽GpfBqy)zUA/a (D%I/?R 0ڃ}?##ĻoF?Τ.Fߛ/ai• c@HjqOi\嶷kWyT1;gi@ӔWF|V`jw=[}|0>B'z.M}?}Ŀ__΍Nmw1I2ױœyc푲RƛN ;/2u9FEmo癝-EէGLWGD4J`Hd+-NNB%x@.ghCXؗXIe"rŇ<( ښh 4Q"m6b))PXc+{cӍFt9 uTfNXȏ7=n'Z[ _75~ha[mn{{P72<ZJ)4ʠPE!j^Qե|E_?‹cr̥[\OЈF|{ihp:};-QVe *dr>b:{Mog::< tŸ41c126N+:ϽdMfHYEu*7#f'lr5}HĽiy,q#Бe RIy᳐$uL T:ɛ m@oߔ|J2cVϧyǺyi4OFW|^]AȄ9>yht{nmzwɆlCig^~qݿkZ뛽 {skqwNYg4;T q^)~{9.fw~Ǔʔ!eK]032`8T pU[:E5LLd,;SeV'2RE%h^% <fS⟍?Le: P#Hj.6ͺs@ͽPw^t"g^|ޗk6O.?۪g0˯n!ɫ9gM+7ӷד.L{:R)*ߟMl1 #Ol @V޲3Z$)4)m+أ%j($RQzjgl*>@wh9DUk* RYwӌV@,h ,ܹܵ"ʌ7w-Ϛ~ƫtßx|q|6z c r/<O"%Ⱥ(e*_$-szSE\7v(P! mDP' >¾pY{,1kͺs?bIZr1YCQ[5FmP{`wY|K6P٫*yKF>*ZO{Ub|lŠs&J^tT2֤C 2L Tu]ͺs?NBs b3DT"xHFjԘ|֚$ucx?e[K waQaۚ~^wyjAl蒂ApCk;rgyyǫtm>vƜ\q,u@reQgU6:S)<]@@ްgE1$KJH`%bIX&Y(ݡh0x 9{{!kK7,rO! ʫbo,᷃L=Gƌ|}JbMPRJWA O?ZyQEhomh-G/L`ys1-l0ԃݵPz`kz^*+ ;u~>fLワ/ <@M}_5?{WF/{{ضZ$ \&`0 BdI#N+ZdKrGnYf) KtI$јwn*] =hĎT Y)xN@VFY$$VށAg|0΃mLjZbBiA#9 >+!} /dl<< `| bs6ϑ)4Q IQ"h- *re"Wpe-@[-@[-@-@VYtt6bPt oBBm6L=-}:sV5 `xt3מ:oLe=O%sgS"?6@8imۧa1Q,0A  /C #,0$"z/c  sniA#v$~,G`$#ƽ\'7 &Rp@eΐ M\s JfI`P\{Iĩv!ă2($ 0+XGO{cDSg}1ܴWZ|ӠwYwgwe~m.V[Rܿ^j!.=0s%Q)Z8 6&gcǼ8kko.9Jx B>WߚwTP/f[*Nsk?ws?.({U<+5ZQr Qf/sWaXD c_+NF SQl 99W9f oNfs4'qh_ $"J>yDg#1D] V?8# y͏Rٳ8B(]SMliD;z:i~9٠(!R7N|e N6գMWN\d٭ Lݽ9Ĉ?Wޗ\/ޮ;f1[a.( vlׂDxռ,GjpqZ' twOBnrW7lw7f}|@ q4.`żGÛ@n\NꝽ2Cudz 1YKK>O}nj{fOOhoA4 8'{`N?9:{ޜ?{e= 8Gam4G ooߵMu-ZZ7ՀW&yG۷\ف_-q՗7`.͒4N&"Av^X~F5\zjw_҄h1oVvdPn@%tCW^|M|[,v-u$+~4AF@Z fqv| GiT؁6,m^Q6F"CwN8Qg-U+Dhsc'"x,$pB`6uԙդ-܍͉hQwdܫ yN4vwiL;<痴QǓ"{kz2ӭod^/{b|>[U$[.AJ 9ʓqTn *r|m0Q}}PN?N?dYͫͣ1<#cmzmz˻QǎŖ7I ޻LJowuޠJe^5f3@š%\ث.hRKo(Ɯ2٦xA-wgaAQ ׼ّeʏg`f\Y [}}NfV 0+n;n!۪Lַ6Kn2}L'P*}_<[pbcяesvaʗA|ogE%^7Xѣ o9y5wQzf=ˌIIogԋR2 %+v.\7YeV2g"c_R,>tіP?խ],x,w7Uچ>i)t4zO[>ëhE-&ej:-)v{ϽEjQ޻Un9Oz8{7Jt|=u+w?^#^=T4^\JNw]u ; 'A}(4(D4̐0\T ]'ݓBĢĢߜ]zw5_d?.Fhur͜lW/,<txًڝpAovyv Th#a"C21)#knunqyRpouhbo$/|:|?n\폷,2βϡ".".x%!=6! J )Hy'P:X l!ԋc 22 Ki8t4.J%]c,H0fzLOẂ76E?^bԚHq8$ Zmxs1q'q;:h!,!9|OSA]ĉNiE#̟lCWѺ9.H)F%N1aTy/@DGfQ;8Pxwm {:YIs|D' &iE0id:# >xJ)֫/c$("&\z" 11O̓b$˴ W.I1k;kMゴkýt-֊a{0 >S8s` _:Te,hU@.>_Ѣe P<)SLEo ZqN80rFݶA#vA4p訥K$gh"JƉ"˞.a):cR>zMQxieDۗˍV^o깐Fh_TUd]6=c62& .S ȳSr3!Ki|i_o M:ai"*D`pB3G WNA#AHnyH:.-2&kAKnCo+xFM‹%x.p'Ef *ocu@MuҎwSt9e!28j`E^H phc!(r\Nz9,õÙvy?p / ya_uvc5ȋ΍m]An8't3G1YwNkd1YwLudquud1Ywc; 1YwLud1Ywc;&ɺc;&ɺchlϒ HFA)%LIGƁ{BFr=#B_v+%X``'ؙ`A?eiR&e; }/da)Qv˾}TWB&h|3otn|ҍOI7>'s=HyHވv#nD؍a7"F݈v#nD؍a7"F݈v>~PUvXo̲16f,ۘelcm̲Y16f٣01Lcm̲Y16f,2lcmҘeecm̲Y16]k̲Y79mc(1A~I_{6>bP>pd+C^Oo3-E)-;O>v0o]"//S} ann|0??@n~-`WWeh:3oa0xK= ػt@8@rW;˓sד?\1]Y˼jteѕ5FW]Y+;Rx-m֐,m XƢMmXg ]TfɂinXvfXvDn,;7)IBB,.@6(XFԇD.\KμRj1kUܺ'l3*aܷBK bs]wqoWoG(X@zu"DG0xf$$Lt~ sܣ^Eoˁ`Q،#ZJL92_;tk~Ǔ:;c]^{UCmk0E%7,YRWGBoz:&G켨# GPHE2Uwȁfd *. %yem3JӇ*C߷Sڧx+gC=G F9Q2ZtL2x2 Is EYpk%FϯC]vwwikгw/qpsN0V>`8.@5+{{N۟Em}/Wњf@e/T>Wcz{#Px:}iqL\n]hE.EzgZ\'r4wpC?:f{g"0#1 d"_[ю:; dssvڅ(S`ف;.g# yf$9f J:Ж|(bfeYIQ zDb eHч8rYJdN&9x'SA tcxvdS_0ד 樓I[gI4NNd5A#A4|8\dQe+%"HIJZ \Rit+*4݄RWk]=Zojr{㎮'ƗLɜcmu!3h-zwN ןK{Vyׂd>&8A!A(R `MmMg%|zdi k hpw%i>؇W'O?L<+ &ho sȅT^i91mT +e2 r3!h5jn$ I 0$ծ̭5KW.N&%ES.f]]}%Q3Jf@,w,YdT<:z4xxhPZ?YxD]ܥT)H#q!r##L6953vnP\*ŅyL,+J0e:C&7RE"/T,B000E2Zk)(gGݔ$z{Fֆ("ď9X)$fYɕp0T 1%0Y≐+hH ()gxQLQ,X 6NI)<&[].tF47t(|2c;2Szrh"dpR21$%,ES)NtdD.pG17孢s ?M1-}1x,Tsˋu&ui'H!qɉX#9h0.NJ:NwΧ#3m?H30NۂԳ'6\-ߧnFZ/*',gumyZ`s=4"Ϫx&+T0)Quz.L]Оr:-IP\"yͱqbk.C@G2m5U,MY%@o)DvN\Fs [>A|- }Lq9\o(v%oc\!|LȸF[ h. 
wh!_ eI%kcLWv R/UWJUǏU?: xɑ}Ird|bn6zi9G-7]ٌVaj7\h; ~yQw^Y&-ny]d~'r[9ϻI8Sn/n777c+sOvGn ?67J3SߍVyVx] ;.bg*m9\):` ;w5mޥ8n.V@}heI`S 6KO\Sn"PRV3XO2602`܆>dKg.z+td\e^m:;I𱻚pD~{ ]vQ.4h+afSɐVGē%@E F&zz8gϕ>fhF⊻T$z`e>GDr fIJ \o+.Sx8dFG1|vBrr韹otRYrz @`hhE BP(Gv^BKMgGqɗge&O+m^,.vvWl8*v#/ּ=(滼.x*YG5$,1t!'np}p_63mЗ:` EO%&DΈFa9iuJk izRR 0㣘c/P$8S~- <"Y3'&2NPC/C')2NEPo (6iiE2t<46FxؘaKxC"` 4 w"E$LfZ捶Л/%_JL@ Q_F镏z_xS) (ggSӳ!ҋ=qngk?K?J&_`*F N%I 0͢@t Yb_N% PSLp%Wh)~:atzM)]7jr#c<22OtZۜ"A8ѫrr6U<>Jw ? |oDm#GEȗM'|g1@>d;f M56"KI߷zݒ"Qv+$Yd*k{1m5&Wk*n$`ߩ69vSOr]ŏp8;Qu^4+ݛ&F6sOg0FX si\ RtlY튑7~4[ n?^BnIܒwt knFfV'\!5>Q̻p:͋ˋɦIhlͭ;jC_:q>58}6\q S/3s?UdC .=quOG^uwo{;_޽%8Dh42σPn5ٯ7yy.MC^|vArCn}˷\+~ _?}y9.>r~=^^0O>pZˤ=ܓ{a4/ŸgURT2L ff~P@%$ᡮ&\gn:s~|~tH.*v#uȕ$ϑeM!@ QaJ`iǩ5&o4 *m씤61Ϸ6Zs5پ):wNۨp3/Zרsp# ,UqⳐ:uwSPg'ɾPlNȎf\;ͧSzkwmlܧiqt٧^/=')[D a'n1K[֧[4]GnQP\e<⪓,UtWp1].\[\ {g+lf ;1䃹]ɟ/~h8 8H;<(|5ݖT2jNJ Jgq JgiMQ:~(OP`OTfq:\,Hc+͈S2١ UWS,m+Pc쵷4oEώzWv4?י_,joݦkW# 5Rn%XdC%Օ.*Y⍿GSly(_wY+*̷h 7?O?<_I5?Ջ'Io- 4\ltRsq=!IiBO G* M݊e7_b1z7绍OO~µOO^ىŏCA>PL}u_]?Dž7&6`=1 ؉]LA9Ҁ Thpd(R)+Yp`?ۇ)'{U(,So r'fBG]W?wJϱtQ SQ;-'7>?WvԟwY/>Pf,sY_޳ϢP I&T4O^ӮL#L$@X2\È)R^P!K 7(CQ2eH:2ܝ Du! r8%VJb) d^.>τZyCe?MΧevW>^L>2"n* r>_RzcQGMΑRMڋn29p֨h(UXj#ނ5E NR^q*yb bl_w`Tɮ.¦, wӴ zE 0pP\M/mNq{[) 鄥HVEt%3,T5\s䝦S!Bv{)IU!iTH2/W  $7! `",1P,3x(x5Ǹ 8c U5eENH 8 D9˾Jnܑ\yYq 9ch)UZfvMcg%;[.TN`y$Wjɑ3z0eO_] DTa<8SI% b9R.#MwҢD߬m4)iF#X\{1A9'  I HgK]:wdA$ BCXd,*rWA @mNMaQ:[N3Qlٓ(6,G W@2>JsgIO!Z橏"ѨqACOkB-&gj4nfoCĚbd55vNY!$4}ۜdszx|5!|]SˇnHv$J,Ob8} QXI9 ѨL9-DaL(2*)| &!qzF0q, %au *n)&XLXR3cO,d,anX(m)3㆜Ӈc?{*ogfZ"Q>+B) OX Ґ"K@4e2e}v8$pg#9YMW)43A'Ą¨c"^Ca.&vvAXQ1ۂfǾ 6Pc}6 l0t4<* )*:k`SyN3h4(ԼL!#33&D\j,2 ci$u!ɐ(AԶc1qasTg`l "mAD!bf<'l9ߣb40KBLK+UhRZK(-D)85Iiʃ,eqyQEE5=jLhn59_:w1q#yWlqYl싋0.;\lѲh4@lP)r) YOH%FKVɨ;\<.Yhv싇PUz[Dip:z)MvONF1 ~| GƑP, '.+ũǬRB1..f,qWRN(DES YiΞ+vi {V3Z**Pbdzy0D KTG$ A;*̗feUHROr~yCL$s6zgLP^*9pKý"?dJdimWa:h=[V7zхoS^TLlTnMڶͱSHQ88i=z5Yf‹j2 I7DQW%Y%8 U6U!}2LXّCHj.?KF(4iP]72m6b+4G`X+UGZƚxQ1~NNRxmvW"J53y&X.o!Fqk޴6}a04X<.4nBȵ¦ΜyKp/l_j>}Q[6F6nz Jh c<9f#H'DB}T2ETT OL3Hd>eNG8Տ'鲫]:e#4=>XS a ܙ B5"z JSxցP b0ٰv=F9[{)<ѕHਞWO@%V*gr4uG{U2n&mA@E,Y 81 "q,lO2=Z! 40bf Zs&)iM|)bzVh8ۏGU& _)ni<w*|y'c᛼?x`!dIm9U$x\T!_ *A8wkmix,{AfEܗ{]3jo;c3l@x'՗`/7&s*a kn?'['o.دa3djnv ?Xl;0LˠEqi+\>R}+@uRP9e)bK"9yY+Yy9S9|K=mv+7+Soϋn|[ye4'J&3'>HdZsLdJ 4+?VVƒ6Ѱ_] VZWZ}_ H*UFY "ceHU^`}=H;g9m׈I{Kڻ%rH%nLhs\ Tz*>fބ+u, x@B&BmP.(3Y)<k1b쨽hk4*$3ܿC76_fuwN%CʷK\;iID)pF*31ƈcrfdcuÊ\[12j~żz+k?|kCϕ.G.&[fnzK>:_yk58TOE ǭ' zj4Tf=xZ`f]aMنYN0F=(#")9IEdYAa4)(" (O : J%Ba(`]*XA ,F"NY BFAfbAx W c#D XXGA|~_iB6%44 NHID)D@KK3[e@H$QH=!uKH+Շ D# BHdPN2Iy13P9FuQ KW)wI:}H WqacfV?bN c{^ E\0E!0ٻ7o3BVgV ( JG=|u ogW tUv`  ]"r0FRP;0EN<=@P& -,I̮֭cG p~oFDef`>t<-mmaoz:T, NG2]ZMק&T&3' >BJs#L.ͩnU#Z^|lzR?x9]Fs.958TkKei%lqaChH_ G%Lh88oڟ49ZG%QI2W=H40a4q"d%8U sSQeznTNQuV3:~z|?t#L/$7Rҟ6VsU~_ !?y%jh*ЪY ĸu9q].پܒ(}|/\<9:UOp`•g2? E.J~L_ ;% Q})f|]?(/"(LUkɑؾō:f#7,XyJ"aNIF iK,&"LtkF&썤>mhx^^xp`M_x㐳V c1k I)sg wGa^;RӳۚG ĥ1J=w|5Ζ4n[}L:4-ЫK㿤1jc9돧  /&/aN(Esfͭ$GU,h5qtOJC3>mOGxK!HʍR03T )BnǛ*ڦz5բuJ7Nj-X}\5 S"ELvpWA"'$ȩ1#s(ˍBzkn~H+4ꭻQkx]? 
NzZt@"$zEpD %^p- !t"ZM HcÉHNkZlE)(Ǡoo΁_4nV);] C}'@cZr g@r\ I!.4z5px&`a#!U'=c3c D`UPIjI!3J)Lh;Ons?9C,MkҏhdJ-nQ*S[ku)E B1u=Eų,NF̦.ʏGs"P UT50°f8TiV`Q۳ah.+t&>)ަC!,7$b3.>頪8$:ߋAg 6/o 5ׇ7@T2εwh6,nrʹAqꮝ 7 s6- TgA@tmr[WQ*.ҡ0PsH[Xvqg}7řL~3z~fS5;FjF$.oPr-0< "e3.F/EpPC;"-|}9DtvW2y"A t6^&JMޤ02XN^'L/aVU4 7Vhx9xtn=$>$>W zN}ʞIكiBC){`EΤIR+){|S$(ܧy){BJ֧̅C@Mt4,$wWA&GRRh@)R:edO: ɇ0^2u+?E-`ߧq*T nxum''´ݓ]xL0eP0t']ȓޓ_<ḟՂ_cJZrSg(9<*ɗ JlU҆-i<QI??MHX`NhV@+@o?^f"Fj=57x`ޠN')d*1Ggoq.eZBj]QZMhOn.XRT'z^}5M2^kx|((䄥q9=D\l2VM=6eDS􂍜MrD0CoGgPφ4$4ؘOh5h4XQ^Je,:u~?'kMlNA|GŴ.Ipi|&CՃO¹`1X>qCOp5a`y UI&@oCX37 W:$ Y̙C:<Ncʠ^䆱wisuxޝTZ@rFϵ,w`d4a ͝W7yM^ g۹:ۉ ofi~3Yj:Rjsfȵ1O΅&zGkv^gN8t;<q0 IA re`hF3dH3HSE*u R2N.r|Dւ cAa =*%O˅V&rZ|Lsˌ]7W3`<)(/&XӾPbV%7ůZ0S `Zbzz 9,i jV,ib}H,(,@'K?5'm~Hٖ/%0CcSNƴcdOwC%>ZwDdlu 7zZ45c'Up=r5M(֍bT w>2 g/e/eDRor /HUȍ6L5!Y ZIJ.z/ 7vu\[3nѫ<_d]6h&UHIp J'2W,0!=CʖQ٬d]x^Zyd0'&C"CjfQb-1<A4DA\tD}\oeX}GR9ܸg7FH2Mͷ6uL}vwxDEĮ [[9@Ȱ]-$mII_#i#4;9܎ڽk*74(9m54ZCЅٝG1x7l۰`5Xja]S+RRY'A RmSp՘jJ=>0U`]tUIQΦ(UD֖iT$CDk:g KJ/YY nVqpD}1zRB&:Y7K׏MmmP0POs fwPo饜FA@=oh $/ o4gw%=C-:{!3M%9BF DB ]'ªGrL 9:;`BPRh,r QQT>Z6GK1)m]Z B3먵'͕c|Q|SS]B%/f2K -ɮJIc<o;Eo߉;iZBx +},% 77B"y,7|{r]i7oî\@oԐ!vߘf#!-`֊H GI+ZC᳻~,qRj 6A{N;dA^Jm9[؎ro5t #6g|Da%AV[,(cЎB6SJq!Q:eBp&}T,6fhdm68͒>&.WpVD茛Rc6ؗmVFX?֫Z=<\7TLTJ)J*-p8UeZTJIuժL6tR5h-4`iJrĦ$}y<?TS(OŔo<>K f:#'B I̦> )(QcTS}VunrRXMQ%wޥ8n.flG}져4˒Ȧ2:.mv!K'$0*(!z\E271R DPFT%q̅ƒH-2>3WWΆ:\]_qD ~:ɾr~J,ucw{ٿ7?VJ<\X1Y2L`$Bڧ BDԨBhB*as5ST,Ũ =>]ȡ2vKfsa`˜e񖞻Z&] ,83<9mu$@-D] ]0fI6`femuXr# f(e)nAo+.SȜH1ƤOU,~U9!Je SPQZ+Xo*iLB9:D&.Vm8친 $Wt8-#9 mpIkq3]8*6+5|ZS;or>lUYG58,Jt!#7N8ct>8N/ a$/BmW#2Ft(#+L[rosC+"yZIKUVu=E9ċrFǵX͡&β97 F8mB!kG; LH;*RNEX/Z~{iִB2$ Q#alDc̍<X&Ƅa;njR݅UUL3e0o%@ob<ȹdBeE\Jm\ruUb >~)V"+- =37|b%|)MW~:ӤBb]epZy7M~I=5Z4qv" ud?|nχ8ѕ'Dؾ%8N%"P SLpj+vh)'~-g}O0<qLӻۋ~lMu\l`rq0Q>̭ts`$.G޴H>'wF^I3{FtwDg7+-ZLV? Y,hx?c7N^[!zxVc{c4[#cEXeqOrrѿ5hTLshryϯHDOO?~~ą?|f&Œk h/p6-IxMQGت+>\t*Ƕ.O$n/"Xɏ^S{KH$)JBeGsEi889Z>$לc%2瘔+aʧ$0yM| bn7<ԉ,`|$"BsII&ud_`LFMSPgnn^)yPM& lC3ݧxߴ;6ODӧCpUt%]͡z~uSǍҼ;HUbBɸSŎRiꇔR$<a1n3`Mhi5muډMT SV|ތI26d_=>{sa(!&m-yyaaV`w3ٚ&RP5 ,2I=Fl,Q5LJ+t{d$[ֵJI|Sp\v]5Dy ;%ld*K9o]hC\C$hMN>1AsJ|0 Ȩ1-֐5#ݟl*6Δ @FpG/gƑ"\|? wm3d~_rG& UIvqlKiM0-QGc,OV/uˁȍy@e=,B2H &.r;l3 7޶ j{,GEG3H cT*R*o=OJ Ingdإ&`ɇ녰nϭHqŤ὏umHぉCFB9a01\9S{;䑙Zo5y`$I#hTHa:ȐH0ɏ?Zi޴ RRY+tv]cCw(̮ 7'{i$ \''1Ms&r%-. J)aZj.<1x  6eT=j;ߜƷG#p: '-SA nn'+mp{t=vNV4ڗg,alM.|@o hZ,ݦ߃''g'UD4ϕ7Q2rR0/xFaNupi%Ʃ' Ot8UWٟ>dOiVԦt8,ag%d@Y ^Q\-;}B:m&O w*Q4zeTd}Y N۠@O^Z z]V܍Zж[0,&2rU|ٹC͞?dO }Z^>C3Mn\QQ)%Iݒ>LoA8.Ǽe#.;Wi-p[?6|rڤE/&[A>\\!i `OE@T/2.;wH6ܑ4WӹƽOZwc%Nxϐ `~TlEթoH} kRYi]|"m]`Ʋ%\5nM~5:O=T:Lj;b۾ >fhy QJ!ҕR_έ e֮:zbD%1 0ɷ@ _^ _*!6G@0?YfCWad7*rtrtG20}Tˡe 8nYS3\@?*K%GITC2ף:.qzK7EG*/[@$J^e,m_eOb/; _db8{Y1ǒIG(eKvU׮ھ4Vo\2!:%l iV4dbZTM5Dٴ Z.M a唴}Gܝ%fHW)xSМr)pt9ͪr%mhk4m~TUy4-XrWM{I ~^PG p5$-_%pn, bϝ )hC Iyh.`"|:Ӡ^|ijP`M.<&{GmK|RnÜ~#`傓[-EaN&˄GхwZk*~ i^H/DYvRE)XWp+mArιυ*.9^z'>x @s>ds NYȩ,0tBK^]]0x60Q+kU[ Њ;]!ʦ]ՃЕ%6y!` >z tBwtЕ^1!Z7x3 tǺs3at;ڶ)%+kZCWԤC+Fmt *BV4eASE µ-thc]JYWHWjiL ?Y}դ-thM׮nZ(K'w9=cɕƫV_N~%ViN2r{.miD+XiQn|4 P~3ʴiU hc# %]"]wLWFqCWAVݍmCW^q (vRLUie2E}A:f#7 ۣ/o߾:-tBv*?T(YW{sB'_LL:Ҁߕg0w.+FB2}}ʍ]?LcEB g" g{,7r7XQ:[dÙƘ* 6Jk17Tzmʺ(z]pĵf@S& Ϊd⼧+m^|^-%yſJ*[\O,F@EEf vRn6#79Rљs[tw]XOf/#(7B4nֽ<'RIIbE1HGosoaL>,QڎMѵ(%b'ԴIE[RHHH I? ҋHFVh+\.`5 j%6 Bb1C6WKr5j|NQbѥ`D)Ȃ>4Jt)}!% >{(.խ` :`ԞBv=5ڐ]jy;aJ$|[(_ 6XS " Db |ZC:mV(5 I&jeW+e$htyֲtXBs ]b"%l@ ]k05Z0+3\iu Qx0 VX4n:dZ{΂NPѪ J(ڑkj $*Z@ԓ*eX\lG?5mB7BJJJ#L2b!]AАV6p.F1G!HTP&׀T߄L 𿒡2UWHPcYTB2 V^X szeY r3Jր1ukP VCH(PED&TDE"3|5gE;0!AŜQ' sG !.A f|)*)ԙ5TD%/$#3إ~.V!joSCAV(J892rVjhggQQ 2#}@PSQzE٢r{RQv J H/[.ѩ=#i̼BիfٚlZ"5ȲU"ZEGy er1! 
aUh#ǻ=sA ԙy?/bF\*"u44U ULl^vNR'D̿h>`V9Àt&^%=ne!9Q]Z01ս.0AABZI|txḰKy@xc#TYiU2Mt%C RbtLECNZˌ %tF\8gP4I"i Yk2!`(`P&>yt_V 5ȤuՅ:@vdm^,:?jPJ4%wV4ZbM=A!%DB>hAjy1j"!%:]%@_>1&#TϺێSQAᡔ5K q[O κ$!H;V@@]BZx _36D[vThň޲A+}Ƣ Aa8J"Z!k,BJ35)JL T "Q8TDYUQõ=`Qy0 !.@IY6 : ܔu9QbզX5{-6_tQ{H$Q5$YI$e(m@Vӥ[oU4^"[BZF7j QH6 W|ZR0UK mry;Xq=xy<;< ۴żp$Yz>E`0uvtrlѓХE4IvPp$JHu6tk*%8O)'ޮ Ę#@99KѰ-G' <%2`Crh[SP.O(7"fh8(uR"˕,.TP=`ePˌ`*1#K[Ƞ r=`-qÖ Vc7 XW6b]MA53"叺|AWr@ &eCE5FHmr3u#&ݞA l?ڢ|u* (AFW65EO(I}!)AH 8QZ{*(=uGm(6tqHU׈4JC`ɍoݨ1jEr7 "&b9\Y 5I)x* i;uY:Gn䤀%a4QAygjp N}TQjme{qbllH-tv{/^| ḩ36d@3ŃytFRl6\^@̮6itvF;o٦I1o7zeq2r+ZY<{&?#O"D3?ҶKlyroiח}2{gESnTҍ,uN,=q/Zt( fmo4}0E!8iwQٷ6r --R~2K ,-Zɪ!.-8]`'&CWO j (LWHWdfjBtE'Wl!q2tA}+B4L>3k9Ԯ#t*JvGpM ]Z]|]}<Jgv,SO RODW{/@WK^*eL]&CW7Z)b:Dg81!`d p7("F;]J瘮AOp+"NNWΙ)LWCW &DW8Z="~׮e4LWHWQ;7΁LeɁJAURՁv[4)NiӓiThhNӄr5a&4S#JtܠBW@Տ{<]Jpt2{ Uӡ+k&@(#"]cR1Ng͎U0N1H(HW}x@nbXtw&tEh-xq,PY'#`yR;µOԾ#48vC~F<6eF&DW7ƩЕ(ԆJm>@ @]Z9AE֠~jqq/\]k(`}y-dhË\EY{6.חG_Dp}##e a5{VW}#@[Y>iѳt h1Mw3WH&< o/6qBt}|tow~4 ^м8mV8?]Qnyº R9?5(!jCbjhw\,,]_ 4k7x4[6Cmx; ?O.\?"fVw ˂o] s|t=GGӡTdR tMiy!km#GEX@a`7`lX`7AG,KZ,_[jKZR-g(R],Wd%CT`\ïeOճ1ϷY""m+>U`v{=uyvה2zԣD+>/i~~ldI2 i` !.p+#?G[w)gf-ˮFKƿ4t5θ f. Tˢ'7 +;a.?\ b#pDR|ah@W|K?pi8:EMo_U<4J[ lYr_FH _!e%ޖ16CPxmt&k<UGWELՔpoݖWФ}su3{𷾻h:alNy[тey[Ja:(Ƨ p4,DN!)`eT eIҐD=4|.Qm5?]lݪ)WBs&gqB s$N5P21C7D/(18-@Y"1Z(O:E-wVz=S,rnpģ6t3FV|T^F8C>lGmWBwqN_Zσ.E5ZM8\_̯ Wr?Вtt>.giPN2'J\L63=@z߁B@P;JdtXiU#(\7@gy%oNNKK '|rXF%4{]N*kjGe31f2Zt|_C,wYeP[$i[(e+}\.D{=5o^V~]Z)Dֵi'gg`D)D)pN&۩fTF$R)C#uLֳ ОR)\B%յf쌝횱;ҝ.J2ԅu 9GՅ GnYU*`lySٰlo:CpPξsʹDb 8!raQDlj&PiW$DʘYjlnx s=,MtP+5Q'툱 ª M3vkl?%PvgұkmvGI֖: @QA[cl?J[Kל' $vLh!#2AC3A!pDIFu;cg>gNb<ؙ|FGxԈ$ȔD,䣊6ьX4L6LL,Pƥ*GKPBB@ NmRKY@Q4;MEѹ;ֈ]#^vGXuHkIɾzQwQ/-Gm$J*(^ 1ԒhA`) C/zgGұ>4C>|xRUޔ3ȭ/b|r>Ƶ̭@:ȫLߺE^II>_s[e# Sk8`SM!6~n8n^UIs-l09i̴o2,$t>MJS#?gb0?PRfBaL AYE{XX/ae|g7` fʉyKt~%lZb &Q}詠q)A!A'mC!,u!Ra~D+ZQrʎ^k wvu\{3SЃ<\]QKTE*=.8ͣ%"FnYx¸#Ei2H5Owzͯ4 = o[1 B2hIOuDHڃ+vcǻ \9ud$U`ͼEA-wiMe {b񈎞 R"Q61|pE9WB !(fɰ dqA-ĀXd˳0O ' ! Ibj~2Q4`Wji_,(VU)G@XKٽ+|pQzSsno;#j+7g{00_QGVNzշ+=]ho>G-\9OLxHxW` *9Gf `itNmJ с R(CXsh~4_o,B -:S";v@E*D [X2h01Z墫Np%!:2D!Y ("NJx4$n_<#z jqnI+A[i€UhQ;NEbPUm"Je,]Z;z 9zN浩 #YGeT+(veQ{ =7d W] Sn^Й]I_ЙˡKv ;kazg&d fŗhY+m"YUXS1KǵjmU^[:7vwk/|/iy=dwU!6RNv g a EHc+~9{m=jZK;NVF[BdF/eI2 pX}+pʆh}P&(p^KΥs"IR|&A% 5Ƅļi O]otŊ>m6B>5@aYō^*rUzU(5Ҽ>y XQ4$"# .E<@SNSу OE(<=TDjC e "SQ("L)+X%XcxS}çrA|Bz> NnZ90fҙ)O Rx`sWϹ:f؜r &fke8["9 ̹uwۉ=vsbъ6Ϣ>P;9Nu(@4S."&h"vYqTC8H .P RA$q~U&!W7] yU#j9qt_||W OL[euIEf]eQiR9޹E ׳rb!#C{j0*|1$Wo:'Ԡ!H 99ӊsÉ+KÛr -5ZU)`=uU^@fa7` yرY`4iN=-t]/7qrTrr٬h1vvʅE.߯:rKM8}-ucBiWgKZ[s|20ό2s Fi88;ۖ۶]Z$ƣU_5;%)BKr}ImŰb$k-fYX⠍0W [0bǣ͢2٦IZ*#K]TԖ m2osb%_ӐI_^V}zB{z*A6U( Fur/ &(DuO˻?ۏ+[nXfT #l_;,;,ߗ+Q$.ʢl,$BFTfV./_o`q Ec޼/_7~Z՚jx܈U^mlWxm6y{-.FQfa6 wOg9FO9guכƆB {19, Bxl&ĭ^; _ܪmR{=:q%)pP&hL4 : +TF `*JAc rH &tW0㼺Y>u_P'vIgA$!sTǠA4”MQ81g1+/t6PCQ 7KkhhmN\24 v,?qYoch>M>MW>M7UvK6G;~1G/kሯ&~z`/lmAv[FҖRu6bws`K,& & MJR[" ZeYB] a'Z,mҭ G03+D=>,. 
4&ms&؋>[}L酗,h-ţ* )gRCDm6q=lN`ݡB_]&kI+wg#QDf-ORK:9RYҪU:lƣr khi1DJhKіҭ[9 OἣQ^r]@'+-a aNg$v#2'Yi(%c2{b JJ0 5ևBj X2>(ĘnxzHB"Njx;5(1pM 3:;8hHV(ョv}1b1D(.ޤ "j*옑!Υp:i\:N$׽v8݊7H[sΛA9p[@.E"i?3&ٻjW ӈ#>!c3 {{b \ޞe\)}<9=d9v0 4m?f㷛aN7Cȅ ;a]4 R›ul-/4ֲ%q԰l x'p#|v z٘|I'ϻT>bp ShAfr<`/Ggdzji5l>NG=TXM2ОM UX{nb+9sӓOJD,JCS[8`Y2l$Zko]JGHw 2D"=XE3Q"RI-"Kd{i5-MCA잁q4]_RF] |@FVK g"DpZQ┷D#SI-U9:i=Dhr*3ƒ0N=1xKޠIAsϲMȤp6L,0IK":νs}=K~$W˸ i wwQ^N_PZݟG&9 saL;'1 J\9VRy&/PLDzixNNLJ3<;9H=+9 GЁ I!$5E, i8g֊8y~ Sq<2s2[( 0@T9bNF; )Hr%?b{a\[[|Тoebɚ]UhCp@a.Pk Emkv7@ -Km8.Mԣ!r&rx%-*{0-5/<1x 6T6Li|}O2R2PrΗc |Z"s'˪y8LhK˯RO{mj4Ɉy|?a+~ ߾-#tB}%Fta},B(o /?2$%}/ҳꗓ%.{g̮=}r<8z `~sO4jl ', On<&]<>q vz]ӓ¥s k9+FHEٕ%Bw˳xvGΜ洤zr7zSvgGy[QZ zUV܍ZnG1f "1y[;}@MZ=~)۸H>mj$VG'.V9]0 ..zc*Njw 6>qq.k=g7{vr@i@RK5/oGnIB7HruOXI%Xy(́~վq2''3>]tFON/9wR*DWxsanFB?7CStI:s.zނh.}ϼ|õt6OvMY8Ԓu{0'xOĽa4&f<5#w~ˋf#֞;Z]~ц7א3 7Miڝ>O[$I32(C|\0 9GR;7ȴbWú}}D- W^FG .˜HBg9P-sBVzL[" 'VHg`Njmnܫ 刴``e\p(J=<2 Tbj骧uW m:/;i^?տ~{:=On`;">Fb#u +[ˀ_??Bȓs0jyҤ6^:0݇?LJ=fZykٶꨙ1e>gI$^df,iNI)sPU1CI(ܶZzrmkw6>bl_?h3T ^ZnL) 'ϔ=IsM:nk 7Y,ZG] dpfrrTkdPʁ_êu|zr2˒?+v}pĻ%:EHߢWv4jIvYG M tGy>^1;qܯޤE&W?㴴(vpQ&&>R %F5۵JˍZ@# Q5VR=IW/DB}ޯѰ5̪ܗ|4!B ߶ o㊇{U~ fs4M#loE/lי%V Nbmu09,Nn8{yeܸ>ןea+dƮTx67W)p2~8.-rԜs_ Pjb`zZZ{Q9 #KoaNfE [? iiz?Oϟ7Dp>9 G3 DZD}PrU?xcc{؃v؃v!!R-DAΆ;H"@O4Ϩjhj3D*HedUqQ8!('0 #)66g,M /wLJg&Ό ʺCa_0"<5~R7`z㗘^/ 0ՋBLocݒU)fmAi0TAʔR4Ҫ:VQgVBBWخӕR+Ѭ ܔCWWb JuRvthE*t(=] ]Ym+IBCWWR ZuW"+='hBk6lփ+vj=RmCl芮AW=5EWpH1tt(%jn*LAtm9tp5et(Hݡ+n$ BBBWVۮtt%,DDWXb *^ ]!ZyeQvMG+*W0pK\[ZN-.GDЮ4ie%W]Z-t1tpˑ*P.&jwJ[iIt%/n96;DM Q6+c.f,T}hg QWCWlšg0JOWzp-]Vvj=ct֠+}.դB\uec{zbDS]`U1tp*]+D0g{ƒk +\2p)]+@ɈjJPΤ-0gBܔBWVv^DWw,c ^se0o+H)ʗǮny ;]l,v: DSXªi[Hחbs1u^?5J/"kͭ5j!U?]C YiSirΙ-5]7- Jz5҂ ;WR {!J[Bw4cԖt •Bv!=] ], T3#\cK+@:]!Jڻ"]Y-v!µHW+񞮾+='*JNW~ctܭ_-ے2e'芯AW=w z5+=VuB]+ BVCWw1th:]!JN{A\ +d1tp)%t(iOWHWB=ڔbHWcBt4=]"]Yj*TB(=d_5epi햂Y ilm14-'2uF4T$%r`*(-jJS+y3c`ߞ'' ֧{U oj2ēKEPL/Z?zf[Pnn2D #Jg$'pVOƪ`NN_>M@yً~<;xÿ4W$aBJ\ Imu`^Ʒ+aK{W\p@A@rYr>mk-U^rq].9$39$8eK_\~r^4j9NH9NCr?M26~?FߍEzQa8-IaU &#ަ>p^+T,E!qh.?/^6~,lnrw:=]bw9e٤KEy|eNi2g@~\Bfơ1gja|T٢0 rd" Vr#9 ZkspNkϗ}5VWtQ]VAhs&Q%%YaEYR yA N"wLb.Y3Sˬ}of GaM=3!U>,,lJJ1TF. [P֫BO/G#j^M3saa_Ӏ> BܯvSيZV~ p9}y_)iO}z{JRZ4wK8(Ajs}hx\b!.\FpN(XDr"V$P%a5,1 y:f4B䤓R z΍*8:|cvI[+NFT9L#x#xi Mnv nr O<휂zEgIƉcp֛2mp?5"kQH GJC$f k@v 'rN/p>*{S ؜]Av_•#.M `z sUK f,ۗ\u,;[P\`?䲳v|=#z!ۻ*J6[*?Ns[ڠQ\\Uʫ.T(F+8 {O>=>f3Kg,XmMQ𐂕^dYd&FbWLe-m̒[9Sg<)z9T΀ב[ ]Yv&A: +N +JuCv"[Og>ϝ1VvO939+=.nx4HƇ#Hz H2إa@#9X!e`]5Wf#{_7ol!N-8 fyAFcm;N23< Y*.mmu ];w{]n!<ƠG CﺏޔxM cG8nM9l[_Oh`W mtfP(U;:ЈYGmyM/)3$N&ٗ 6>-Xj͍ނS#z&HEM<݄>7=SrUS>0Ub\ti^ ha:A a*(Hc:gUgh齳2g=gjqn[g'Ɖ<% 9f^QG0&<2)X˭1 c.:!*.#J!x9Mr ^Q[;gǽCu?Ȅo_N<-1}*)@И\NYyGMB9gxBȜ)']v6*f`DnhdPg'xBiX!tNq5DkK]Et%'r8RT9xVf=E&# nۣ=FMNٔza&0YmɩqP͙mxeP*o  O4WB!(@)Щ-h1_ .g,Fε Tז3q[Jw+ ѭ-\-.%*u^d|Yfl:c67,Ap}a-&I I@,X8N, K:d(i_"̊lJcQ(ƒY(ʛ0d]3q[8<ݙvkeV[>xec^ehqTd9XYVS{!EG2!CE'l 5%Cl=9:g=lH 'A)<EL?"ʣES _ǐQӕB~0V ^X ܰǧ tgrH=iH,]OZa ~R2RhN 'N>G[N.i2d@AEt\g2Nhky@╂$SD=I|R1࢔ О웉`=@kۙ8;F).D$.#9- RE.Z~1hthwӖnc$r4l`, yz[/&=7:dK )[b*|MAuNؕ jNHxg94"0@@C$FCupu<6 U<+ x8}~S~}o}7o?p>~<Οm""(}v/ʮEKv)uYyPS]V{Ç\ߪ2;HK )j_T"n͊GYhYy% X@'` M|vyT#U@ CcBx_ iK&MmߔH_ ?zz_;GOX?8GJ@Y܁j$299D 7%7aJ9#P{%[79R W5Cp8 퉥m@C/BbBD%Ry2*pD/ lqM5ԙNDcGwo2]9rkΝJ8 +nF,Ƚs.eAN~.LfZCisqZ`ū4b=Xkjj1ILmvGLG3v_]K^յE\._յEZ"6ǫkW:M]3[>tjϱΆY+Ҽ~_nc}^;Q9!Ol0H#]4PXXev&$o'wN6'Mkӄ #dK$|I[(֝q:晦cgzȑ_iSNx-a2Y &٧sֲ$F[ݒ|,-ew#qMU7Y*Og`X܁P&sJa ƆTf#k.?B>yT'⮃Hlz>'q[5ӮE^]l{O<-Ch?'g$gcs^׷OfJcLJ0+a Kv$vdR퉕PwpBA,o {8Ze}`Z^ "Zs;e|0΃mLا?`/wߗHJ27ᾅ22@ڜ`zi^u>za\LCj\\$2h-J*3B|'~״s״C״D*aEGJGoC Eʃa,ItL. 
~\tE&=uۤ׏l d^zz޾ܗBnm:DZ,6;BνK|>l]bsrzs:O7thsRm~UزĖ nzWz^6:zWZnhCͧA!ϼ͜ߊ&z֝qd󹚾ښKOb15on&eˋm3>R?CL"[Gh"%+C{K%LPBdOȷQ: 4RKC-2 B>fO;gݎ9ZS,G$&0 :a\0*sބ5Tl= x@ ŵL DLjM<(rAEy:D[i9zoM-OF_v(x ž5d2E^iїk9:o'Z ̃\9iITE 8A΂c1ђ%ƴWαzV-^WW{u@W ~j0:O:2]L5ŘP[bW!a\7f՚- US"%>(3ut9\;~OeUNeU;?z "*PȀ&PG,r0|RҸ($q$ҪI2=<*cbLP@C}& 1jM$:g Yh^jk6x4e5OÜN Fߓ ΐ C<'BI)%g\.M1maT^u |D 5Qp D),j|4 ;5ed #RS9Tތ /LdmBM2Pe-ߧ;UWY#: KtL/B@I1 )6i\b:!9cOG=ǭkOxCpKkiAvmJVPܛCfq8,-]R C>A_̻jI *XE ªi.J9,YvOFtWNο Y@D40 pc;3܋H yGtUjQP6ۑяmB46GUNQ_/ - H,@R%!ki8+1<+ Z<m {r={0 Xt:yw,艌 uB G%,O}r+˧.\oWh1EOޢ{)ww}o7W( _phlx~Of cZ 6SMNPk(ў3>5=ylG8+Gǒ yB'>` {&RT[12HAoyZ+ 0%p#nWl9^ (y= -@,TEOX-V(/} 21жִORJY/ktv[ !;C7 u+nn^Ȗ6xV.푛qD6gI$g43JGƁ{BFr&,M`ތތ깗qt| p3J^/_]>Tx6=do?ӽxs2}}MNּ;ŏ)Y:]ؽuJM {~f'?VMڕngjj ,{ Eg&luymΧI 1#% wŭN.+q~#g׍'v2<nLFŨO,8 8iۈQ=2){-poȴ>X{| D-  +O;X8,2$S CEgS HI.C)*:CPoz׊3ЧO4Ϊ Xmw[v;߫iWʕNm6Pv=m/켭:` Pp4E'ڌMS3Q0\ ӄ$FRƓ*#/w ըPºPrUTu^vtc&NN Oncyh"J ǧ<F;XR0 0Ǥq͍N-.A !8m-ɁOϠolefY!L!2Ac"A(PuX2PuR= T-:q :C:TI,2c`J *K)Tob ] l =s@b\ei5뺹B))όz抃R,c\ei5t\e) +CcJusR*Bzs͕D-|`5eKX\eF{by- dgzPRMbjO}YyU4 Zjq`,FJTs h:eqYYI ^xno<tB~_W3VEE&ۼ(ǃsV ՊHa=2sebP%EfVFÄE&/h7sT· ӝL}k6wѿUҪ^;̼T OL39/SQij|U%`۔] )%X-)w I䜻RX!ƦN#]?w٭ޛ&}V3puB?XeL8D+~Kg/~iQ /k(kYe<䘭٩5D1Ɖ kuR,7j[RI,q*}8΂.'+(DVP@6bMS-@N] \ښ87'%7H^pJ-5(pjx4ߓ :FE!FJL9eCBy.5JQ#e.N/V;e4  ݲ&Ζ(PM1maT^u |F~P@DGfQ;Џc,ot6Wf(^(CmP% d=*:4Sk@'D\ D3  6@uFި@ݒrg&FCn(-Ֆt &FN=tyZ=t['n堵ހhKw3DIMU4Rn I@p SkL%ҷJזԆ \7~PNGxN8Z"ϳQPJ SRqq찑ZFF"ؾ77D6NxԬ[B3?ۓQ#Š( #|t~UR/.%x)xsEh2;2uGs9  N= wl.ӎ.pagt'Ke 1W.5Bٞo_W #{ETz6į~_J9GJHx}6ΚJmtV%s̞yv5QqafB󶄞գVm6fF*EM*@(r8L.،+PY}sl Ѝ4kmFEvܶ,Cv6,;Y,p0q_-,?Zen M)#~dfىNu3?= .FcV"†*NA\w^\iG੶ӌ\n"vy4):惘G6۠/(pOrf)(G;Wz[n\";>~r.u<]o(#K Re 8̹pf7˺˟OlS]#:8t1Fw~`LtcwDKf*lZo~=i;._U#lx`t\/e{bKg&x=潡^L]7]d=n2 .=wZ]x'5QCrڝgh8MJy &QXp4q($ۈ9_){-p:d9ρ)ZOo@>1`dL@$Tr :K`@M u93DfPzE^>Uu7hN\wqMv|G= WDs (ƅU ^uR? J9=i~iIw]RLӇ ^/k{4+Nob%zvKi (㩏~\uaG^R>ˇAw=><F;XR0DdcSy\s#SDp{{xCy&9txCff ( H!@aSE(@iTx(Ri\噊w젅(a(28٠t^#v ,#:jR"q"rM $D3!8NRn2"n.2P๎#!Q@.::*8SBJI{!ֆ0T&5 `6r%2k QgU!]nqQ%Olپ%}_fԕxuh7UxQXh^107bsM~ySbe}/~PM JUNP@ bdzˏ7,QIaق R/3KP \kJ=.hCL$ BD%P.1X4TBXC#w&qMDH댌;w/.6fPwm>{OAe'd]4P 'XNҞҐ&q >PƬăbS6} < Ll`;8j  *!#$&Lx kbln6 k/ZWkZ Z{@9)eِPx0ܧ9+ 1sy2I_T& z3!CFdHhϣB]".5u~`]H2;X69~n+yA{HHVY)ކћbPhQ R#EgE'YdK9) ~>Y~]rr޽rNȍ9F'ctKn hw&S`; ȣST3dȚ%Mr*):$pZ8%.j-Rhs*jcH #V;e4e(r~J{gG;J9 +#fIƾ-G~|Iӷp/O=v{=ktV]b2~a2qfM`*^H*ÌRuI%Dp!6nx󵹎l3]V7aL!~L^(TSIS+^A+Jhq!D2%`=w=6*ҌӸ@y-Sk^L0D)FkGqr 02 ׀u%AHao~59 CURR9e*(԰.'@x@@#7)g6W! l[O#KBju1)wϮE\9?g6j}i}q>,+ L1OqOշNS {B{wJron/FvM~9A(9htxqT6Qx#NF"DŽQ쒰Krrsx&'IjyDdU'ol̙DDv 8nciYSAq:q5,n##tO?ѥmHӢym߷}%Q ok ]=UXƄ8L֣E|6&-\z~:0{,F saOd|v[nv82Φ^ϛ*qw B}'!EmXm$d6O8y.>Nnہ97=ԝwed]ۇkdz 9u>78}~]uG4j㔞lq\u lpN{x矿?|Lz>8Gt6 J#rk#76*jVϟ}48'9e#g^D!TspʣʍAX qⳐ j8)rSe_{#KYW}Ovzy:P<| X5x"08Xv?_γn] s1?{WƱd f2art\-DI/iJ%[Dql̈́#EpNwU:՟wA`* )&= X5q{|}V=A{Mٻd/U M$f퍍Q}W&8K)ZqGˤ5b6kQ-Ҙ\"[L$FO5m5#yX}BSڻCm- >\3vI@}bV >8yv {f KWedP+]jz3+fCW ]u2yJ:DҚHkp ]4uJkBWHWF5;&=u̅Okٟ:Jm:@bm܌ نZ3uk?]uaQWIWV28v = ㆗ʾ&u6m qjxR53zS|{R^A\*0W'uA+q9dup+Ce2*WU[C_`O:X1sS.VFr޼=Wm^hC'x!P>Jd\3-E(O Y/<=W]oXzkkۈ-m4T6pV2$dhY'_>VfҋG1o)V=Qeoy!hhv?2Owc7~]{ڍ>d=-cH9ɍ)!8EAɪb5s `/~6] lzBؿkxZ8Į׏ 3+Z͆pgCW-骣!ҕ7F9[L: tӟtQ:Z*gƙ̆:\BW(âĄ`4EgqK<h骣\BtizBV 0=,jz%]gq7Mtz鉙͈2φ:\kBWwS2BWHW@WNów׸UGk'OW墮 O`n͆:\BWmUG):DbKN椮|Ukg:ZoNWl~+PɭH^*-[W>Dx\GsO~F Mvi638GUv"s+54y( /tuth7'oqXtl誣5b(-tutQ2fL-'TGv"s7z?Zi:i^P73W_3R>`O0 z#O?C;xQRpφwB 먭{F͡6J.-%agTls! |^^C ^{'h뫣 絼ի_CG'q-RVJ"\CK٫$]|_\5LVb*V K-s=$dc)&jT UH1bQmjcJEbt37 QVdIlI_Os ܌˥M8\ IlԱD>Z2e t`7&Z_k#@RkL*bQ"ibKQLOmE F𮎜O߼y{I͗Z6"_Rn2Yl,ǪTAk{,,3شvol6;Aݘuk%A1qjS]ox4TkjbR-[Lá$Bew ۼ}beXx.LQh) ! 
CUYKk,Gev6+_P[wlZ8x!ZW+J&V/ώcw1֠-; kI )dW ~'g:'ͭL^s[K9UJrkf$H u}I˜|lCFqU;b5CchA?F7iYmheՎ Vj) AZ!9pHJVy/R.` *`Ԛ%EM#hDGo7_*KI S2S(!l,;a0`' %*{N4q o5h;gvfQƏJ#Z,T|պJJ<,dPe2C8FjlEC\KQ٠+.T-5Tˆ9غx֭>SI9pulj8~PVzq}!RR3< / ѻZ8ZP*c:m` E ~L `s(SJ Ɗ+@Z 2҄WsW2ɘ`AVꫢH$(MW9Ae:)|- Dzgd+X22qR XBہp #6CA! 5ad\ !,[ @$4&X!AY$Lhn8@E+N.GWZK*20w: !&`GţE 1҃ZR H$10"heH4 Ne"y L|#$khL5W<om W%JrB#kU砫W=xn[MyY 'QOV C???=>k>kwq2W)Sh1t }v"r[ a,{4%n/y1j$!Y.//)4c?ղ%lgFa 2/{hMF n2v29*յc[ A P6]ko+ cMN|7) XLq`fL03dȒ$; ߷[-ɲݎL<HVVuu*tڬӝ*FVJCh 4 e^XmN Lp4yp =@l!v@Db ,DvY[yc =+ BN b~06qӖLxV@Ԩ 746*FUqKo%AQjZ:_n028 *פG A0 C:qh7za i3 ~>  i'0-0`d5% 8꺀&uӕh&GL&S4{MQlBa^ZVoU{CV Ѡ 1;77p.rs8QB%ə \we~pÁh6鹆 %r' .T:!0/! UЁJW( B\p=a5D}ݎz kDc4BfYa\0b޹Xqqr%6S`VÐ$-hyne C,l/qB LJC * ^?\.e~ 2kJ~t,YYB>,tHrK{j!t%bU', LRL L!HcKc ~/5w vV @T o:j KU w)U9Pk 6@ za8bdFXZh_ 0^T=J/p]%0|mQ2(ƞZMF=HKLC)c&ГJ p曆PA\4[/lզSїAVKC@m(%+f%xxht ")C_]bɮhXOVOeGo߶ >c R.#ؗ/_[07^ IF!e$CTi~+cNӳE@`Do,f^F/{5Lx;8g8XcZd 4Lŝq:kz>izKuf u{٩u]Ӭa\ Z׆\7 S[*|@ !&Ҋ(-ER.6 .Eb@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1]&P%&Pa&0@ڻ>&P!1`H@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DLrKKL a0@Z^ s*1eb+clSn(BN9;N"/Z?~ivĢ;iMz -S*.{9T&ƫM{(Q?eo6糟62iA ST4Ak[E`u( gf_B ęM=OzDϩ@Gyn.uv[\a{i. h4^֜4~x p61q s*]cmGa4UYpp o ,ӛMp~ ڎP$9U5IP)bn 3J^@0!S 6X5ߎa'nc61 }h?:8}35'6G=XhF'8iUͽ`jeRf*wH?W[Uɰƪm^5,3U \0e+eaJX-CoW!nb7ZZ)֯OA DQ`-v&EqO~2d1UKf>_F |LO3loPVltvé'0[?wG>7Dď|Oh&R?7Wž?(Fʃ>~\Q ?! r#|k'\R3n(dvz 0oLF7`[]*U]>~0[]6vSv}UY_z[{u_zT 3k0o^*ǝ|P|c5p柢c|hy%LY4Q;b~fIF<0^֯xbovcE[}G]EgFVx] VЫ+&n-%觾ţs[J4It0?R:w͗P.н+|͞*}Klp [ָ.j8T:-"YLnSDY}#wz= %sj&NWVxjMzq.us6T{7V=N0u'Gh߮™֢\w7X6uNjpwWM(MW@Y7m M>mf1|EyPmHTšU* ]qWD]m}7j_t\7@BYFB)֡nlgEĸEB-G['-'9;+I '9JrxDIJL:Ck'9|{Y3_lcBչ;XU_cW7w[4bRL7vܔ}t囧oxgV񡟴e4wG]un.6l0btKnv k{]^#j:pt:[f}_r䟨IF`s|ViXz\X?4i_c e'Qr玦7];L-}qazyǠ_bL@67X[z3|ppeOϏ 7Kxijt$:-ˇ05JPn&&3ji]μ)+aJ3I^;@O>q PgjL@p HŅSB!K^m{,nui5vAk_/gP Osmc| τ(UΛ`_DM Th'3ng4FC!*0J)+p#@ *.B,z1\+S$YiyAt}޿o/:?tZqpȱVL+g=A47Y^V+'*)ȪA~dzά< kf?pJk9E]GvjAlkّ\'!Mq$򸞥T*}Rs& ]HQ2 id` 6e\A4il,a hzTf8r 0n!H74E5Iof&_OHkn=xz9մ+yz/O0e?_#ozĈ$ܟQp֛A|ג/}:am)4ūE ^}%痾^Ӟc{_2מ>OA7 ĩi֝@p}Y_q]tP=>4M<݃>cNs{?ǣ֥гl_,z9lGI 569qpkmW)ŻI0?>:: ԫ%/r,ʟ f}^+)Zv%tKkVjZ^R2)z"⻌-)`]\;1BƳ'cOS<a fY'uk|5Z[wВŻV" o =/<3oUϞ,d6ʸ7h'\KWzW$R@|BD"cRÇ|TUgxZw](v<#u>>,ty9uIOg|6i'44S!PL/>q7Lkm#E]6d~Xۃ FalDKYRIb夂ĎTb~ J劉Եin LpJTޥj;C:-o{D`R7R`w8Z()g|]˽ɕ]muw589T ED@D:ryJ}0LIO{JFHEoS 9~Zqq>q :!f#f3-qh{ J)Y} _.RJG~ЀQ`Xc M#*SP.8tASc4{ؑX^c)0huș8*Z *%릲 27ks"%o-d, 5-}S3Jvbj(B}99`i%vQh(-H \ 35)gڤRukC8s;@98GJܰ&Q!sK =-]puo}y]ŵ7~ۏpZn=|-fYY. z^J VWa>+:~:Fݏ=EA܌=`>.j*X 'q5JroFY[p0,$<ׄRvTJrTn07yt3Kgs3wtrk RQ5ӛa~wa`/TÏ|]WuKVJ!9Rh7j`^uyO8iWQw+I}jm^HbBϨ-|ji{=Ddw ..=N@6Cvd E$K3Z5Z5:>!z? y]x;r?`!MSlJܟ<*/5yfZМiT4'^;[=G:}ׁܳq{ 8)ղ-`t<:,L[]ҳȢT#g 7 8T5L(qkK}8򐸎.r2B]H+Bd*Y*ml,6]Ͷz|?zn]h˛yREBUK&Eor`r^C192m˯c[ւ+CY+=~6hqib!Fqޖ/L7o(M~>bw1GwVY0h&(D>R{k Ȗ|ҶDF]xXTjr :q"qKl)-$7d2T6ݬ'J dJ\jia1Fs#VϡP&]jPp-Fݹn=:z%thh[ i#wIpg%d Gz|I>ч]YFBs^棖橼 s \:-xZp0w8L]H'lSv In}cݣ=>w59@؟JHΞt'HwEV@-٨fj ZFWƭT4+2f:*#`ߤdH2N޵?&j͚oί~LSqIo7 ؘocZ^aõAws(p(|8vndKbe8 T`zSLei kOЮ*[vMbZ~t;#6r0\+Xo V@ =2YR *G\R iM̵2Y-FiZzc=i3ǫ-1c5,כBLi[&;=3}%fj̪S#W Za[a&f2#U#XЎ@cqdȻ$yO/o;!G^w]&XG4y: a1AY[y`Rԭn=",,r2Cp\)"ɜ&9x'LP;pv۲QeDwǨIBDm@lM2:K :,80A#Q4eu,b p]Dk^Ћ#@Ĉ &CR)DPT2%1P1hle]m85v<]M:ڲԫ8 >#TtA&'cF`Q8k]W0α(ebU}5%D !WĢDC&!-5F H:!eh<֢SVv}ؒyϢOE#VSш׈FX%Z֐\DkJ%ϳB$29\H틻u.'RXYE e0\&D.z&B@FPFzU,nvxwI-SuV}u{٢-t@0@K>AF4zrǒuZFc 4^/C/tme}hӇGPa5'Sjȭ7q <ԭ ~|%GkܼB{@Q[Vi7R}ˮa.kĮOKH땐gdxH|nVTV sԍrmP &礻CV9H>eԱJ6T׌)d*Q{$8cV\,Y'ZgS2p\L.Dc2U$ӐG<&%աˊPJG"Ykc!' 
&Sk8ۭ }J9G9GQnCISx _Sڽ}lX?s_=| 1&5kS^ftR7Z(cLS>6xCub.c S=>_ma/kK3C3Ӆae񖞻wʰuj%9"'K<[E F&l=O=52s|GeqJk,M OX0#J`IJ YGX CƘHXU\zzbR!Jm 9Dʌ&Fi]$l RjB;oq,Վ6B ]˜\k{\s1_ᘦ\-x8x汬oiYt c(&G p26m]ȉ'1#޽ b >~*lGfv8=,ֱݚh#If=F5:ZƋ5 <>K50z!W!)pCtΆzX(K/${nb0KʣrSu`V|+] f9Ș<]^]teۃՉV{GrvZalבOY(oQi*ͪaN;vBfg}ob]H% voW:h1+y~=~򭟿Rt~/`)?P״, ÕOy}_z?ZpKZt?EbY& #:QZ-\Ns-B=rMh2ڄ]wpxlݬ- KtGhq7EW jh6j6/{ ^|j{jMf>nޥ=amhlz1^u(z+zW⒦ GMԻ5FyNZYgeQR¿%6Jp;q{Axui$LýsvBrE],KUXJԮq[)[a *-;5Kfcͼxz0.fCU썓.zQ!Rk)RxϭB9zAR / ˴- yYRBs& [r*}60Oi60O a)J[!\A=[8I 69e4^rbatyU4p17AneK-sSO!O{eV+7woƀ)D!0<`GWIHjƻ>E #QJ3ˍ:+ 0]QJ=誇r  4+U<] /GWENu صݺjkw- 6Tk]rv-za.#]pEWDsRڍ:;?Z@FJltE2*躮RAW=ԕ d+~P"\'sѕԊތr?pUtLNB`)Y6"\йh `/ur7]A6_.J2=oYb9f른& LF?h6\ja\rdZ 픻VF024;3Vt^D)͠jZ"!Vg+ 1-(tC]c5S銀FWOL8j!\+ qhR ꣮tZ .d+AnJb*ol]]5uU]5ՌӺjH)Xtu8v.E.#]2pEWDkYuEꡮFt q+5"Jg]PW)DFB`dG.BZuuEr+p8XUp1~ TcN9)Ύmm ONоF`%4B6]vDL-M nc:2}4XNX/02-> XAy}4[e`ӳ$bq J*rů7#g zӏt9y=:yRr[4qL痈bt<4#:[mef۹R4(.ggU|$_R<-%'cG@vc濍/.𿖛¬nK}:T?Xy8TReQd*ef(CM.YYj0b,;]8^U Uڱd)ه<dXXt 0UIۊ8*N+a(ǽQ=EׅQ<%7$3 `*9^ޖЌ i3 X MMr DٱI|C4!aR&!V6]aht]WHi#G]YfiM>ky.BZduE\ꡮgtE*I|Cbr?1`LF:ږ4tcx]AW=:Y8\tEu]WDiJ4d+moW\tEmu74|Uu%Bo juuAW=c:jS#`qu]z+7:*FJs*J5n@}xRԪD`h{Ã">;}5Mzi-HWX>}v, iJTfUue6K pm68vs( DN83ߌp]6BZeuEB ꡮ@ml3"\͈(ъO%J9_^30ڶwȶGDB͚Ѫ^>jFٵv@WbծE\B`d6"\sju]ez+<#] q-3h Qn_ꏮҜtEd+V(tC]RȌtE6=2؈_WDU/unХu!c %)p \.l|+&Fx7h׻Ih麷LGPg̀vpGF ӻx4)N`|#M梽C)w|S ͕0u-gZ lV3COhhNB`-lnV(7^t],x2ҕVJD6+@"J;L:yN+6Lg+#Zw$Uu.i1JWy iۮ(tJ>13jp~j镮fc t%]Z;Ng+5h꺮ҩAW=ԕ8*j\@uE]jt$Vt ]Rh ﺮk{ z]Ιt1Op"Ze+nUuOJF Nqf[&7Po,#MgpEDk]5 >jZ;3ך\tu>&J>誏2'+n}Y3|tw^WD)>:7c{%]nK7U"ʍ9ҟgWi]w >aw໻Ed2+7f}hfbzzr⓫*SZCp)J*br*mPA+[)*^2)ؚs|ZW=hi?OEL'pQ\1+='ק&%V̖b \1t qQ"^t;UjS }f#-PrY\?2Aϋ^eO6[%֓G͌V}d힛?7_|\OϮwf;^w\ w2Ou~H>U6\&1.(|g3ߺ2}rql>N?l~MU˵7һ1܉c1uR6;7sQn/vQCҺ :j5x,VM`&ZC)0 UJAo8.;r!Jwb"ěubi 7p>_)*f+lxg ymݻnPLSYt$A5 +rl,O 9y@,X6ئQ-wnwupzw9Ej};o2x-·Ka%') SF^rZW;5UUY2lJrIL\)OVxZz %uh3 f<\[p.i% TE?<\ͷ_?;~6_~F],ӡbF ^2olAVXt8;w|J2[ + _5Kwmy?_ }r'˧T!' /H:)+筭b *E\˝q^!╵g h!(n됒>zBNdY`BzuNZKk9L"%Iu!fQbT1[ti m{5)C:#>n?W'׽ZvSZJCVyG m9BvAFcMɺbx֙6Ooi)VDkRo0* {Ӱ7vE<~&J]zߦM}O?3Ws\5H -=eTش7` =?Zp-lX ZXM_,$FWFQAA*+R匶!>9^vIaoS5EW.5,lRGR;dp0љΙk%Z7Z/Y/Y fVLZaq88L^isAzm[Fy;H9Jж V^OOw^ZIR+R*H+SRjѿJzuWZDMB:M6%9\%%p2e* {"BN&"*H!ǒRYC)4(-^/*a4K}-_-6WJ9])¾.փzPc4v}s&kE}7cAϧ4]P69KnF-,6r %Qk ywx[93VS+ !M38{ ]1j~spRh _7CN׫֫aa3djn   w`YvHTHX+^QIՔ(5(K Y.UJ |?%LIھI,h@,ݴRk.>Ju(L.+nt,6f_m_6ġ}Nvk5!p\{l zs*؜ʁz?F|"X"pZ-/rJ:+VU *Ic)EW;-OϮ<?T )haMTme>qS}<[Iݻn/$7Nm<Ҥ̮m/h7񚳔PZ/iv[:]٬ anRˊZ}c;r3=܍F?|]h~+sgex.fiEFϛn|[y6ϵ'IXknn~"\ eTp4/<b2JKOre)x=a>U ,\~k?DzRof)F.$IzJDKܩbaSL\ܲ`2^8݄xB " \5{65,Vc.pK'Lش~EZLiݬֻBAi8)'vy=O;N]~H[_3E~\rrsRѫ*A;!?_IKK|$9qM#ݽ{QemBʚf{ŴF&L\?ze`|2N/a_OVǽ]2/'u7D~#ƛGRzH7MÈahfX~Ci8`"G|ǏcoMӶqT6:_dӨ kU؋Qżg |2^aFS=LY4tj&9?>5㟈͟wo?_qa߽7~G8X4Mai7?^4/݇4|hi.C6W=Ƶ%7{|˵2ۏ [ )~!?v~^o(-R4~\]дLS9ǽ8뢞_̊ `Bx[. >ҌꋗL ETnvN[/6G[kN5H7;ב$PV`4hZ6+L$sRb 9&P\H66Y^Us48aɛ=t"!O_D sKDr*Md #l.Bwʝ8M>޾|~5X3 tv8N~p 윹+Bѕɢ+Xչm2`;)oh_蝫P_༃TZY)B|3W uA&/A-ؑ ,t;+k?"&!1H qU^ڄܐ(4@3N 4[O9%CpA%m\x5&ʼ!{CE_ŇWc.kس^vD 2as :m.abAmœFV]>:c/>o) xSJ9*+V`&ta6.Q*\0ʾhs.i{<0y̠]H48cZ#,2PnG MXw0%k摶30hO(xV֦}͝cnG~? 
/tN&蔖#CI !vd !se!soi/d9N&Bjd# `T*z"AiJ>HR}_>u4@s},A+d>bi3+,dI/>9t ^ǜ\0~YqҘH.1^JvA)ȢBR1hN n^X3I.S ;3v ggaus Zp7uGj4aݫv]+kv]fĹW.ԬvV!t̤wMZ:cbtR*X*:d39u$yՇVXf79)mI^l#Wc9S,IChb``Tgmঝz>!&3 C.):>g#EptV"XE۬yD4с6)Ţc 3uIJQ9R{hڨ#`ȶMkHLS5DJd1&R똼 $a7rt*''pq3dab^uMЙDOgT$AA{ ;߀!KeΒj_+VWu,I!&h@-7r3U]6g{#(?V;-DB$ȢP+U4J<.vqmѽwX7i'gWgj]Ys9+ ?S6~Nlwcev;&5MIj}C$@+|U >|CBFHAX`DߒȐRBk]ɠs(LA]CXm7#"̨9_ &4ȼfa[ ?OW5(g!BJ 8 p3j$ ѐdOb"\C O D`"N uJ XG0.'&VN2Aud9KJe[VѢo*Xfw5C!Cۄz)]R2_en4a&8a9N*3a~I/ ci`aԾ>Sӊ߹OKB $y3^))r"gNUl_WRL{ǏX3_syZt5ˇ_믿Λ>^W#1ǏoDU?oKҔK_oFy%g7k%y8u%^ˢW/W$̯8:#{/gE^gWmR0^籏%Dd|jt4[֐6-9 mߥqs֋XޛA\v^}mn%?oT}zaى#ޓJmfsRG|ЭK]s|?㫵.O#/ߑ?|DRf 6T0E1_YA静&챻Afz{l0j=|\.>-C{"E/54k}z/R:lDz7Ď.hϾseI/B0tt} 9ĦlxӴ)JDŽ:C[kgHirRBIdG@$=Jh<9xa#r3erȁctـ6&y:Ыze|U}jscPՈLJ 0Z=Vm"nm-E cO)pPC _Bb`BAFFavaR ZePÄR/?Pj{l{؁B~hq4BɘdϙexR*iΫl(e5zbh$j k=5BKFc4j햰Z->bWY8.F\3^mWAڤ }<' ,cM̙6& 4%o=:Hsp9&ӽhe6rS;H* g ^itx4Ns{'5:EPn+:4K:9YPLp{v1>iMO5qy<3-4* xs'$:@*$?nr/%*$?" Ye!Xj'` *[, L$d PEnmɁc='v.p|y!uDIU:zRU ,E8D~|LG9'/pyVǗooVc }646~} 4vڬe+$W±*EGK)O]䩔+Zy芔NvRtVIn2^g嵋VqK5MK (?x߬;=QwD򆿦{w.QƠBiM$J㭯ATK]#r웅I'`DUMZ>rrZ6}t'ox^(୛cǎ bN&Qy$*/:qIJ];LTp\C/9R3‡fLOU@)T3]kt2>dD$M15ZAMvԝNm$-Υ4tSTxSHPp0KP[[HKPWx?Kq+4g__\Nd7اqIF"*8hQ|ыeC`^u1C(^-Q<$.ZcP(aFt۬9HFQFq5:H)/82ֆ,7G0LfQ;8ۍQ蘌{}ތif^N{%liOΓjdLv7<*8Yy)o|}ר6mTBx)΄Jvu!`]ȣʑDP2;rV 2Fұ* Fe !FeRfGSV8hHRFy^|TAcM&J1Gw}Cv|OӤxU겱ă%ԖEFkhD.1Ȼ[e73{cfM&[rv`TZ ]1o;HxrŒ4iNnq5RJ(t8) NŧN>л]3vy-~G'vvL뙘FL+YBkqاAjї ʒNsЍrm> }VKP5ǖO;P>doJWHqCV\dUBk>r!:Hr1uU >&.W7؍ 6/]1 4L.(3QLZWq@/rJ:hj1);(rMi&~tkQi )s&@0UN3Ob'E55.-iSp}u6.#RNW6DW%6证^_vMx~s;>l<o쮸B35W]o7g ƩOzDn-6 8КՏ_ <;A3~(nsQ?Z L#pc N^`%W=o#h;#MC[؎V2$`SR a:Sρ[!DA i&AXo+ʩc5ӞSE11]6 %FRiEHf^9 g}gx1A~*[m ?wp>RNvׯխK_Nj`L9a!*ҩy ȠQsg(׹&Mj[̬(GNo@&h}xC?N FCEY#@1KP7--v]]+ J"3I P:d 4Ԉӯuth]X Vݡ8},dQܖq-] uߎVx09 ȢSDS495M›T?RD9f#ڪ"RFEMşVLh41 ƣv(C ~A#Bej#gC`9Ӧ1Egz偉2%]D̓P>WlxT7:54fzOG+zK\tfj}"1jeZQ')i8܅q)x/Ҳ]2UBCp*%X\ipxm)=Q(Ѣ DDݨWKBq82}{Vº 4 %&4v!'+Np.D5!LOoMI8p]R)j}P PQSn qU $25j^Aڪf` %T`hLr$8֓r ô8›TKo Ϯg3ŏq\>3^Kr4{ktҕr1şP5p Ofgp&?bO  eC w텉|,Ґg(D#? pd0hGxzW1*3o-!"J>yF:<(o'7î ǭON38ؿX\* #G:-HD9m_嬎k:@-@ CSd]LfȬk?N/NfTcqOӧٵ?Ko;?7^_ϦN asgzOgef> Α?r9.OfۋW$-qq{K竚te36O8y*>w:'V)oVWg{?V2>? 
.|(TdqDI1G%5|۟.|wxTGoOmקO߾?%T99}k\q#0>gXEhnKo"@pO~ټij۫foҴߠ]vo.}Y.lϖRXr~;yw着]x䍖 +H6TwT'J~X ; CB47 F3=>(7^+o5_\K߽Yd5FIg}W㺯6g :wXmb{-b!;; ?3P^yg(-fU{l牎Hcn 8#W2?d29݁!VP-E_(5(jnu룅&&{vSX.{J g>b&nP?#X&$QF% X (Y,+ jDF؉FȼMh7;~t}: ext<7E?{|8zm>65-^L*ZrKYsw^χ|z%|{]/LNIGq|e|Ŝ?:$CJ!?䬺up1>6dkڋ $CY9e;& *ׇ/М}pO]rp"z9MlFccIAQh9W.e7(R߇ [LM| 1Y_W=\O4tV)ՆM5X X(EV/ O|mٵs9$w_ceT^^|Q6l1O7 1V'yi4m=;28ų֓g~S޹ x)*o97ꪬȚr śob"3OʤSLS\;1MkfَIӰwTYZobEˢmi)*gmT0jd3a3~1фMe2٫ RU7Xԙ:d['3"3@QQ NK Nx1Sf.XRP2zO^FZgG->QopNM;T5?lNKHU])J*9N'6jSr|RNI3%Uz(́>}4*+AmUV^Jcvau\SePx x5˹_d`>qCGKXj%U#dE:Dz>fN*ɽ]xUq.~с\tS_ʓ6}r+r9 ZwcF{\7^>F;ǸDh#P@FJ O1͌Ng 828udٹ{G&z31fKΠm!A$8?Q=ZqzJ΁NL :![e΁$ҵk Q- 1x+8%ythR&%W ë(b.W,`։N:MUT /"mTd-\A3r Irj8AĄ%Br:&<8A%2d1V)d'EmeA2v'j}۲{7ܾ׀ښԺA-T8.Vݨ Ȓ6RK޸Dt֮p"[7`m us܀[,܀ XƺU&}QWZƚ2u=GuЏXB c{2[WV]=ubv`!%f_Uv۷WWJ޺장@\]2sz޲ڋQfKY|C5^Ki at*44 J#^w84TY,wDtV-: ,yhgY]uSD˲qrwo|q\E:h[FMzpP, ~u˨'_ ڷ7 g0ѼݜJ/=Exq{ya0gؿ<ގ-^~H>D90Jv=EYr:|3ࡅzWp<؛Svj0GyYOa€ eU"eϼ,F!N([˙L҆\Y7ٻn$W}c~1lOf $`7*ڂeɱ8n`.mI-SܭFmP|U,V|F#،r 6ͮ!{m>+bC6_co(۝,g~yDJ6['Fr4*\).4( ?H.e#V%Ny ^=W[ uq&ب#!)EE&d`@R*V2kiHCCq"-G2"O)E0J:r (Btj>+\ PD^%%ViUf5&WCxwWZ;mn9|r<^m{m$/3FGCb"@"Muv[kR*frs=k*%4j$'/TGdU@k(Mi p=إXR=e^P BCէ.\ua]v.싌`O#l݀Hiz 0+RYP,< ia Am4ndV %)MFE 6mǜByT䄙%*Kj,hK&HjԱԖ8J#][Zσ1@9BE/H2e4lTd9XYV<ؐ#D2!C^j$d @!H2튐5|CalQ_*e`<X>E"ʣD=3f"ZI\U"9!&РVK,i T*Kj٢.ҫ$͌YJv\Gx=Zd< DdZk ^+s3p^ˤKQ\0Q.C.u*]eyci=E}juFo8?غu1amAp7~B]`8>m_HBMPG&IŬr;Ǜ utSB:c{ ui.CU$ g d:r9">&9KAE RHechQJ';R1Z_Xc̢Sl$GR˸S&̸)'_~V3CFv=$޻y> &}/|VYy% 8em9NSRY xI& ?40n0mU 0`IBa(\H"F-PxMXV?}i)םZ7Ժ]iL[s8zkeЏwZ6r~}Y7w>EK-hּ o{=go^ʹ+]XZu1,6|;ߔukZI>or*ZnGls{O5KLZeZe,9SǪZ`U-o^ZĎ:'+ ʃ5FXJ)#7F1gLI#촐X(a;%?4gN;'gNsC<.R eu*|c N\gnNf-ֽA)HBIJgYrC`=d.A N"wLb.|egcll!5l? ㌋w:ɾ쟒q:/7ҝjwK0+s3FZfL٩ DT &`G!"Ad!\1 U[IbfA0Rhb$)x$RtI!$)rP<)զkp;.I}f+`Ki'/5TIoO5~ӎu8,Cm;j{#WfUXlǿ6SKTꈀ88S^s ]=av8w![q;oVc4nBwvn0%F˝ DhKX1&V($g~U1)31IY9`,O]pQJPhOD8h/VΖB ]ȕ<-3<6V 8?{kNˣ FQᖔ΢m@1e Φ^&hsЃ zQx !zNfO&0R~H_u`aT6->q *{vN^}:BmѾ-?;;\]?BHJ~%l9:I[!?_9KKnIO\}7`TYPcњf{ŵF.M>vr120{>{U?̃to%Ho8vB@S0=ҫGFtE01-p'EF_']稂uȮQ;֪x{ݤ{+b㔟hugtj&~vNRO$>/?PO?}}_>qD;0T* vMai7w¿^4c_0gC?ZCxS jz[T%w}H=֘mHʏrw?ԞxIq"ЃWhWXf]Y+hEx!GA{ CWF ٨ȾxTd}Nz${̾q)IeFsghtpNS*7CNe6Rug\5-|>DÁK,h"~*C%_r '7)HDftqr{}("N"]w*rN5 w=9PŁrg(ϓEP6a;^V3)sڿطq/`/*A%olT`!e;T/Oں02ЋBw^7)4A(a3*w(R7RƋ T R+v.i›1Q&dI~_7sB;ASžJ}jB%1'Y#ޠϣrϿ Vyl3QdLX #zZNWS痀ΕAAH,$qe2pJP\{褐(dQw Ai8Drd2"TUzB8?L*C2cqY5d&$*0) I#%(aW+r1wBZZy0  Vr׻2M V#;MHލo:Ͼ7|H[Ou3 I qL>FJi|!2i^dtlӄn<]q?[5#$ŷ>vۃ&b ֵ͐Ӑs'V=xYFJO)z/  2c'LXG\Uȋ˴O_{HUGJy+4P + 4`{|=6mr+Op^2e"֓ p EBFv=P.J 7+.Nt?}o֒}+#z#Ux0Otyl& f\ Ք׍udp0eVo{@TkknXl ϵgZUy8fRI{RZZ$HAH"A 4040=_7bj[)؀^%Ơa9γ蹍FOhfs^Y2F8 HS"hgP"y(i1%k&X'3@hKQ]kkm</çgYPr 52QiIXTܖFHdC ҝ=qΜ,\죌۫sI@0?PC"P (NEo@W% J(eɘ8<eBa5ӆmv$vx0 td2]֑Q9[Z  fd1ѾxilMYT@/!C ;\/c4#(e1fZlv'rj2N8iwzDKn@-YR[+#'!1ȩwHQ\ۡwn9p (9J$)Y1C2c 3X޵ўkcM{kK:2 A}0,bR$.4`0ΊD"F6vCP5y;G̹|v#W>q;Q i?Jzv{/R "k!HU`VJ9Sy+U ~o* u^7Lu-\ >w~mfRc2B #k3A@(J Ӣ|yt[})uցسB>lq&o/K'W_]BơR~?cF?sR7uiq#3j';m4$Hڃ'Sq^#E#^nظW bx @$oKMH/2t2+2#eJ!hrϷGkRNΖ)!p[CbnlOTB|N'xH(ER29VB`%RQ4]K+vjv{ճUK<;%Gj)3H6VkIqD$`2,Idߕi/;{.4>?^h+rŐ_ӻɰ!..{w9t aiz?PSy߿A7D唜 nԙC_~_Α1BW[α8%k]q># +K87+]uX[;WʡMՈ^+_w:KIO qJl1bp":[-z7&쮇gym=kVi+ѻRa_wuD,J| ,|i9|wm 䃯_Ұ:DeB,.|9]x7t%mzymHu6Z7xi 7kl y17i:ʻM-G[[ y\aq`Ξjz dE:FuEj]n+ v:P\Dw;ؤAR6߇p:q$òwǼV(>az`fNM f\$q~p=Oz7-y ?l^m^!fhTkW2O&uLܜ6܍mv۴Jy۔;2]a$y%ug~| t34# ,ס91!` \)Hdr1vDnk;2OnKIj )'_bf15~#ԁq5gas9ޓ.`ru~6LR]VrHQ2-hLMxN)u>s۶a=ţy]Sц_q77_ݿC%/RD& S@`h<KPjs4B҉`Q}2h:XtC[ h!(n׮::uRuY2)9A.r.p Xvـ4f0LkE M5MI'ִ{YO7>lQ/-ΖY:o-sPܩy">V_U1]"!nU[dȊXY`PrYń>PC_8u18VD1V)ڻ( n%d/Oh?.ur3w5~dDz68^*p 3 /tdI+Q;o =gc|Y8;w~ff׺o D& O&c-+.`$i?\)0**ӛ41i]hzcG"wEMVYȓRI 
@IfTA&4rFr9bBWm%V_&ս>^4lкbgM]GM'|19$|bf& 1\p>y{,Ĝi^/zs@t%JQMC26K7FґP\9KLSxupY;YzƭgŦM{޲|˒[,kULtvU6U߿oZWgE8JEw"VVxH؛G2!yMȽ㚐C&$ Yղ,`D m XtYt, L$p\CF]:`XBГtdS`)٥2a9~sn+ϳz~12//_g|n߃al 0vilfadx6ԑk@LGBSFrޑP)xߑvd'`ɨBѧZdP٫g$( B0* $"juUR^]=CuERR`g' dUVٮB%`Ҍ]q~p2ݺavޜ]~^N_*13o\gFWW9Wʞ)iP[sA5֥OPv5`.\,TZo֦k~߿)4\yr>RW.WVzP3RBsUJP+q,WY礬vJ\J,A o=h~"v"c,MUMR VUV8P(ZІ1"T\ \OŵPղ뮅B%޵ ] `SRW`s:&db yuETὺz>%%*[}2ꪐTQUpuUS٣種FopS'5ew5-lI'k_onJV:}dw˅!dڦ*`+rAޱjn>hW-^ V|yqhK2D5:enb=Sżp߷Wnzxў~VMM.M"E`17w}&b} ü;Θ[ e-`=S>,( ,(䞎CԢƮÂBX<ŃB!ĽsZSab,}2JЯ#0e *d &&. yzci6uX'^a|=N%->\`FJ1M7%z\mkhaG}S/[|/;ð %CdjKmt[i4hj1.(=20V:]t#r hS:D TJ9Iz*&霹VYFqcY[9 @\eҹr랴vڭ^?+-d 浭v= Cfj"\mU.Ρ֝,@HT NS2c*|eS$cZjN֡^+PJDJ0g-s$<LQ!ń: ,a Bl,sϤ`֡4)@gacJ+.} {J%.2M4ޡt=/:c+{UvJTm|x 1Hw6D&gk"h/́L4euI s Vo˯<$IHZZ8x1̆5r!TWKUb*-3&viyxk|rGOwͪ[˷:,N<9?;͹fOy OdZ^.|Ls yIdU@CƔ7,_3 9UKǘkc\I%Oыgzȑ_ 9ٙvY$c!8[˗夃ܬnbŪS9KoRL Ϙ>m2*la-8r^\VµD?.blp  &ϟV-67TpdxEbNV,4`d{ޠ][Š,i  ^̦ ZF]4XtI:1e@c+[ja2Tv58jVVG[8Z/Ml&' IqT'g~s]Vì,a&bȄ $"51T[F>Ĭ"cm2v5j춇-|IO"VӏSEܦ"׊P::LppDL\b9ʗ\fKm g6@HRR0a"s3!2餙"oV+Q;, G{Ǥ?]-D;LeC't]oe$;ψn(pأBGkkW>G4.iX]TDxN7ZH >24BDODXC͎u>Y}Hy-s1| B{.{fLe$dN+eF Fj R )BV*S2,R!d9k-f?h"ځ( [SDw>ofW>-kF}S s8R2{RVRl,hEf APzYP^gυ E%Y92$K^\Lxt"U YhQ!X Ia4cWF*Fs!⡒) )%XUg7"8-g~̡__B ѲJƼ6΋n|n 3㟿 R6 mrLCDh X%#\e* 0Ƴ~VKrOK;οl޲#No Aw۞߼Q $;[{Tw"S;V|>vqjקվhjo.=z!5?&z*/dD|_+@*KeTtm6=#AGMўv6Ń2|')l1Z/CC `' ,],ʓ/Y vאL6%}'T44%MS28by_[-N;4j 2Ojs])ȈĽv l!qǐ5檯-P 5ϛ/mp2*e U?ymoH PtwCw_nwoos<Ε0lͶA;WIov>hݍ/-oܹ]ɖVWd}}t~lw GO{|?x6AvWno|I!{ӝxޝ"yrZֻ_ȕUO7f:d\85Fh]VɉX`:7N9HPq|U^K##wO vj;{?H#T,lj.1a Y8.$FRZ>2UN3zB1n.y,(*˔!&E4ҙqޫ'{t 1,>h'Uޝ~C?ϑ?43]+[3YZokMj #V箫Xf+q?6ȈT2ԑh&z֦@"#"Sly_',NaqRa:8XJ4D!H0 rZLI,ry'ª*q $˄sUL :Pa>%F88 ihHJ06+GMbx@45 '\[ӓk,E!Ilceصᱺ]l٦) 0|:x-wS\ӢnrhU¬Kʂ;-FrdqgLjd=R]r(@1KD8d3j}2Z&f2nfs2giտVC"V1 #@ >Acڵ7и VǢAiQ,dcRNrɔO=Gzng5zg 6wh!OLRh I(=2")>Yirth=*{:4:jtj4wTjJxΉ7;Cy=t yVa;[vv#r2b`zuR \ TL5Y:>PJAi ^{|Ȍ ]8rI=8=1aq܂Q-{W/gmMj9ge{KfG,r]%$O(fU(Ր3[`)hNX(Clf, m6Z M N|RMo }H0*udSe&1,:;Llg4b/r2M zu}}D\AgBT"$xgWNhzI#UrA2^p=J5(xӦ; j rjwړu[3csx\0;Sq<+O FJo`l< M:Y[)0q$]1jsóU x)B"Nȭo)DR[%i(8/LN߳}ciʏjZ͏0lϋ1#}/-{N9x3T(C.J32A,<OG)}yG̡͛5~Vն`{ӧR#{YN'~ h(e]rn7{O7miH擫6 Z^jH'`v5+~2w5ءM&b-bFƹ(`-85 ݯ%6`qHycuv۠DZ3@GNhLq6kF1a")dK#fcvgK-j]-M?%%YgO&}L*ZHGτە"8ofpq_|njOGhyka@\̲l5/<4>Y.Շ9? =$٪W\y-P2tzﰣ1a >0 ]_H {%SƭxW 6/|aH8 U-*> ) '\p5En-C/?ɱ3˚ Ƙ8:y}S J[ O)ý'/^%IrYD*ޤskIYs1L_![;4 ɖHdmUHNc)#j S%' p$QHG`n?Oxbc]w7=DGօ{U{\tnI=ӯg7_27gxzW%J?M翧vM+S.S/J\FTiRuE+]/ka#W6W6[7ɽ^e{qO˥qk+W$,s[n'$qƈ|J%k5VFFĔt6l&ؘbދe/cf&9pz- ¼x:vt\v¬8Nkkj i1uDt<}3_$?MZBĹ` ~PRE$!UrQAJw^V-Ȥr$9":`+rZW23)bu6֢i<.fe]O`jAt~VI+(v(GR&*Fb9b;^ _JHٟ2l%9\!%?(੒pM>ebDX3ܠP᪢uv-^#07U}C N[y]fW]\LPky\k ^0$h⒯Mgd $MZ%=8hy9Nx4ܡAm2n :Nt 0hbA{|1!,0X#xWLř6eqjKӬ]ͱW8 1a{.߷ ϝZ&"F#첯ZxvrS ]4\X-6o@/_o RU J:psLpJ(-%U[JkHҙǭٖ|Aj "j hs1T)5o5sZ,&bPƂި~ s;ް||e9|Y*\So̟K>rv^źY7wU#%߽>^MCSE_~y`//rsY7WlsВZ9"hkwk2QfgflHYYwfޝ-ۻo Fぞ_7M#tһ^nxF^57xny@3nXPٻ-͜?dy\ϧ_峓x姫9 &ɜoḢIM-;A4N\BW/_N]Y}"l>~JryJ#5e))2:Q0& x~x43^G3^3^}z,ޚ@ dJlyUV0l2LurIC:`F 2J-1‘mIYy܏ת_{~y48Tݘ>9d!_Xص{>s~1B7R_ub?]{53 usׯckf}85PuGkeuG^A{}/t}@tKt:p5 ]uf骣\>ۡ+G8ᨫh߼(?+J!+vF ]u|0ꪣv骣>^3|&o^T")O?L_ͯj1ZsL?fry~=iAksS{D #:- gEݻ_w0t`hõ$7;[i *;1}+ -HW.xwPsvᨫաUGk~ή[T0?p0tẃQWAz UGGu+eoWvmץ}gCt@jGxT[:p:{(Ñ]nݹ#:-mm?Q_3䯧ggDl.aC2=stp^r]<;;oY+pqs3Rox^|]Ƴ9'&ߵZV/1[Q8QȨ_]o@X{Xs6~qhO6Q;4s"j&3BqS-cCVݡ`XQ  s7{,p?[8zj畛י)Q.NuJ}gr 5jhUoJTt"IŨ~OGLpC!]<{͗$ >"]no|?mcu(S[(EJjRWߒnv*h.$RT0٠S[kݵ\)&XjT b6FŢ۴Ŕ~a-ͩ"1';CMV\%_F<)YDj"BW>y I%31WҸ\# Ŗb01$.iD&O |'HXj\6UDqV@&kı*BDh{,uXz Bh-hl6K@ۘuk%A1q欪-? 
!nv"vY%wuH%[}5vV3vMhdXCnO$a (`ߍ;Kmwb5/fr{l Zcsֺ^Bc*j9”nEOh7ôUym@ᕥSpl.;ZB5fH;J24|2Xiz(^TXnך* SUF֊1->o$8 ޵(|ɩoT玮KhEe࿎7k7z$]䩂-?om,] l$遣3s;Tq}"nv%fB*]׆?q??xo>qI0"3،)24FXrWr*햮 ?y1v6+$(UKD Jlp;E~0G]{tnV: #\'Q(Pp`ͣG ֌a[XFaF uȅd͛ ObV#{6i~T.n|tΊpB߹"s'ɉEvGHVH$Oٺ= SX# ,XW$U~+ڳ~Fk)"#S IrŽc}`qJl(x#KT+MNFs^OIzUJ.((y|Jm9ڶgTyPuj!UwiU 9I3*DS h3Z>2e)t.<]?RoW)UVN F9P0\o:)XԆBAɑ*'f$X5{Is` 3Gِ=^ڤpG`\$TAS)M o-Z+(Z_2P/)!bhI!Q˖%C\p[EcWI`8ybe6f1Ɍ뇱- fs""E%y5F fTP]ʭ5ic?%zWVkߊ|@HF9+`R2NrNA δCA?^KwPSQp$Fyǻ1!c9C[Mt蜰|ARsvn*XZ viܰ>9B]BwO]8w@K)['*9o6t@jz, I%6Nc}c p) DGz Y{! ?.FB>b0d1ޏav6~G#בHuyG!FS87\Eq9b ".xtK'8E^9~` ?M˙PqnG{ cAD8.}/PmZٛކRq}ΗCTnq$E9^nfv?1O"V%a7Wf|,_= p='w:gQMybU!ufsc$B \B5(=o3 QvW?^D-O=߽{sQe ğxe T5 -  4Gc̍JJ킱?} V "k;tA^sbʴ^~ϋ]Jo{׏U.ޙ?:R g F GiK Aa6gHyeexωS pxc9QG9*@kԵ>ˣslx@֮ceOSa3497w.|ʿ\|L!\<fR +<۱F~wǘf<4TD"yO)WȂ[Vī_.i4E4xˉJ(贤B幗"80r8pc! 6F&9@4kԹȑ4H'^4o -ڂf=%.@! pcF,,Z0Gj V@\&d(Xj%0n hYX%pM cv9z[R=dÀƴ0mvӦHK/3"%D,Xf KRq2DFT$90?`D%FÛT_K_WK7ϳ1<$Oڑx`0,"وK B0J D:Ɔg,/LU:[p.'d_"$wɾle6jX<8-7UX}-鰘KvO{Z`X$QR 1[Tm2pk3 QO^n2L-3eK3 QIؐ) ؀לÎpKe+C<0PEVT(h*S;VG RG8),^řI]ܣny[1eS}ޝK%!-] .yco>Vv49kVUI~ _+mc^Nļ@lG6OtxM (.B" RVKFctn\C ux/@7Ƌ9Py1Gbss[KuyqiuU^ ʋ9Q`y1`y1^;4^L>X^L);X^L8X^L:X^L%:X^Lu#(hkT( ) pd: @q֐`y1#`y1c`y1`y1k5lVCbvu#tc|zSVۨ1H6%^j%zA-?狣lZuvd%{#6kޗWe?Ү Ժ%IR[er#R3D*+FRgU ٬P.^j`GbGdl{jKiRSLQGFRg`-qM]Z"&K1az9ŚԚuwn7Pt1ӑazZMݲezƇd1d!7:xstmwff XJt.'aV>gTZyfaCa^1rC9pfak Vm-Wu{M[{qQ;nYޭ 8Ds0X{ݻʓ-z\ԁN=6nퟀMe[~wk!Li~OnͻqTjѻtrﱉwko5TȝyJޭ 8D0U{ޭZ٢wAc^>&Tn-wk!LU+T5ǷߋB@'xɦjw-Zwk!LI`ݻin:E3Qd^}TEu#r@}Кwž[@'x hn5[9pfaJʽ-\nZ}n:{lZ?Sӻ5h$bk׼ַruP:ĻYWβݙwjwk!,L1ٵ#Ajy_׵7:F]{6jwx胨V=o R+"zپ^љP=#ϰS)}me_[LДѮEOK"k!kfBS]R3љ(ZO Z9 kA6k}Inљ쨽3Hs}љZ!Bs}ə@(\ahscSV[cscNk9f3>瘛 (ٵ3H-ꜞ&ڶ3Ӝ9>Lw/ qVC17:8ը{9f.Xcs!Yr\is3Aٽ T9>LDt0,`Us}љ;j3,4s}ٙPڙR9>Lb8Դ17;41D}179d{9fPcs̍]=39f͸s}љQsZ s3Ajҹ3RR9>Lo'cƈ9>LXq{n.OF0+Hpz1 &gq=Lߕ0/^@BScFVQVG |ߝ7ӓОn}Թ/k^c$pe y%A4a$zi쾟Lŷ_0Nf>;1y1y?.E`Lcd>~Phyp Ch(!`ʪxc K1w0WztZyB᳄wv,*x(gKqƫxGed6^0)_ uPn-'|iS+}GhV &ީhPDUT!g[XIQ$V+f6Ր*$ǘk.IUtQXJ ,Bd@88"P(,O9ܽ 0d3rOӉ _ϳi0F3¶-#8Ck ח#3cr>5PX~U|/f>1|D̙,ERr8yYyC &6LݷWgD%>{SkިkjXebEHCO/}G?)=i5S_pP;{vYSzyt۞Ҟ{>m<[*Y,W)Iy17G镔cJjJ`"IV9`ϩ&R5XS3ZLE )/ vl`\jLDT`  ?d*SRcjҚ^؄~b;_ԁzXJ9;wf{_ 'е3j!3B 2"&q]pS׌!QQaߋpT`4?N33va_}<1,K230q0:jH<7 q*Bu¸wa_/=>~4̗gNo Osm;!6JbJ+ƀʁ^dr0<|I]/>H%LSro|,)K0$ .R3}s4E\ǖ~x89ϓ!E0p?\X11b=*'ΛLe~w#M~C0=ݙނ/:\]?~8g뛋]3nٍ֫Mdo E`X5w1d֒gdwwgEY50N.APj__f pvkz1ٰz/K ˕'X;lߚJrPꏔbܼܳ F'krQ߽3Ocas966zGVR&@ )ypCA[K \66wppqz,?RMvLi"񚂣&\z)8y"2  P a`C^Ŵ-ya{u)QVsƅ2E\⒉πm#pDl:u* '[YiqE$,h1+(˄A^M3`HgCլXj[9Zr2̳xJ]|~' sN^*֋r6rz6>*T/|g(tb}T9K+ sO?3=|?5򆯮[ZhmV:/5Z_ǃx=/fOfIU:}Xmw'AJ́r]xi79RWM ML'!wo(S*xb[@П #Df^4wf^,d:+{k;bMRY+v׋+36o\0nk_(oWR+y}-YbAyUi*\--ׯlv|E_7u$bꙦr+_#ݺW(Pa=w t*P&0ts'%$hsP m/L+N*6dm[ |ۨ;t~;j{osdBgVyO'%RRPO(bv*ię[09Du)JsU(KTH8t=n`nL=xVIo$(B2{<i|Ƚcn@ >,?_e@ ɝ+hx˯on&.`^A 2M#9= FaG7  U.IӡF9Їs~1%b|"p_t:#!̹huf4WNџtTXyؽAN1@#=rƒfkAZTjs6ds\,:"HHA-{zWwaw`7~ E-f9z'lL^I"r/lLȈI%TFoJct-CK-(`n c= M-^{1*𐖌-n-WW_ZI_ő~l 8~ O'.O(MS yiXrw+W# `J}4l4@K"Pzf pʕWOM١Eb /]tJm53  C ؁" gA`)KG9 $v|q!۱:! 6& uܗ؎Dh«NKPx$:(;\@0L:o8'T? +S +aOp}[bM*^;g%[aVSv=<3(fq?Q^=s.>N|zC;gFW;m# Ӏ_iGL9ػ uf^eFb>l">[ `DXGF,s D/ԙdac+Db^&=΃pWC_ȐAV+ 5wRms%o|VqN^iͣ=zn:_dq!p9}ƼՙNdX!^)mW[LWNbD_Ʊr'E>x+&BL5*n@)- `5/|+If,we0d %QmKCD$Ѩ}7HqSG+Vk_ '钄;9)Ҍp÷,GKZI3eG1w>BթLJr)reǏ]H@"#ci2?/jryLq7ECy2uE CE:R&i&EE@Iw S{m}L~U/k 43-(y. σ7A_IvS^$لH0TR}Uۭ>{ vg\hd[733ݚ>aD}F3IU-O1ґ8Ͻy=c:>:hytӄRGI.ū栱XBvMcӹYn] 6 Lٴ>TWmZCb%%wseӧ{|տ @ʺM&p/NX mj^XG7 䧂_ZeHbՔf:j9]ψa0^Do{WɈS!z/`Xˀ#0nt=amu"$+{bnO>X}.S궇NESťl=;JqV9 ʫ5kgyi:1b^mLDPQYx郝niNx>Xp3$͝)]V^x-‡Uٔ+e." 
Ag=Ewo3ueT xj%[⯟4眡 t-TЯF_%ɧt_2N %]-Q^Ii}fXȮ2VRvU{juhtݲzLV*+;\CԱ꽫z] HL?mP{ kĸ__+Zh6UmSwNdRv(!aKkB`Te1}H guVP>[ǚk-GRJVntQ7m}soܖoߔG XI8` EjϹBScc2H< mII_~/QDjHzA9 sQZ%xQ)2SF J B@̭EcmMͩf$ccɕA R&8W<7J&eضɃ6&S`YjIar.#4ʽ:`}<1"(>GP 8 X;ObI }ٍkFK%ԫMrZ cZ:tFK-mҗ$5HfY&Cȅb9q $'y1a-`(4}Q@wC&ʝTZGmi褛p ):4t%u{=Ӷ1JL[`0_@`Yd%vVj1LHЋ".m`ޱ:zD4:oajVp u1}5~x& *N|5wd9Y7bmb^)9E>8s1s;ceAFxI:BArqV }fdQ7ɃԘ5ΏN}Y{uG8YvfK>RcR8 ؎r>h 7+NٟwUp3u?NۏonQC<7]Rg΄``g%:Q~/E&(V@OaiitĬ[% bd0P1d5^;DZQZ^{R-5J`DrM1eTV9nQG"i\P^pbH4U0 ƻ'v9ր "Vz-N[LPX`@)`L- J\I%!I k: +eu` 9& :*(ji _tiԣZ:~E]q}mŚ/v5_x})V{Yop1A( p0,,7J^+!'\"s@ۗ!{gj`Q-o蜲 Q$i_3FutRI AL8PGHS4XGI@0'D0yG)IDr -Y[mŚbkӹOIuڍņϢOo_KV}ܭa*َW8a:6xfvf-vF/|L xqs?8r9/2z7Ks3c3O:.a^|>|b)4П!QARaRğ7sIw{OX[,h6{ܳGnH<iC,ڈJ^nZݚ uv%1lڭFS[gF<^{ݰ(%/ݚ uv`4o4U!q)Sd;ٷ\s"DyzLgwcp ,z4'3۫իbdZ{w,?o|aP_ld)ݘ4YRb J-TtXq)֛7! !I[Xp=R 嬂*~9ƶiuN)Ŕ{0 p5MeͿ~£_ƚzsë"x+O\฽ƲvvEGt8c9Dy'XHAd^3m`FxǺ!K)T6rW茤*-&.%{$ 6&iw]h`L/"]QR0qitI:+<^ߞzM;9)(R1~}Y쀨 /Xt%VJn4N/9IfweH2;j2x 藝݇Yi!ֶ.Ki !e֡b**n,f`0$3FY:)1$>Azр(. ߩ9@l(zG7w,VBŭJ6V^xfn͵9J 5h0)i|d$J%16'!)h͘&a"d5@D|.$Pav"ĕNx8O g2I$J+<8"d;.w17:c> {L);q2rnœ7y,hbyՏ3~^c;FE]do8?!q^L2ͭ}}iU xq}(QePZYM;lhe(\m۳Y7~ A?>UYv%[?jk3zycz a|4$7剙"w!dpʼkffG6M8:Go;iZ$׊ͭһI--Dh-%5@DJ*ӺԔA [w&<ʋ=7a MtͦxM037aY7aja;7ak}^q6RB,LP-jˍ%bC*F%4JpFOw|=*.$36`hpJV22 e2 t8&Ē橧/[+̮[g׍M"2P U 6QQn?imrB.$#\Aypj<7NF~9S`*;W\]ΕkqW5ep R~PckM`oRVH u*[stry,zƣkuN7eez F,]X,}Օ~{Jv9$ՒJ«*4?>m}%?˯}ݷL-hbM%(%LG1#~K_rE7>ET&OtMyGLtCM5׋Nn^j$k Ԉ=#5u@DTWӇ+Z]Ko*j<^xEl_VBY|DFRˤAdQ"&"QMuH:n"ӂTш]9os'2ڜY 'K@&\6?,f_:eW۷DkRgny$?\R'mv:Fs$,IS_"h KC!s4a- K`jBGSe]«e|y3tF?\[>o5OMn= k'W8>t.ϒcfx|%_f()VFKfL j2wsû)gu*w~dZ1 ҏHPfx`Mܭ68<=.+TNʃޢo\mܵR؟l9C5]t+u?"84ȼs\vՓ6xxo(CtV|`حȢ~4woIi7I}wJϨs'po & 7ge t[˾],W ^\];{Å 3V=$Qዛ<ұK'˭G%T s A8qWR?8Ӆ,%j'qE++L[!tꂨTbrw*o5M\ 5Cl<ƋERBia-r1%8%QC~ 1ð$3B 6,JEnCdٱ~wd f1ԁVK(`r4) s)ryBrgDXtId&(LeZuf Dw[ZkW*jsMN .rF'L aLRLj>]37Lu7E$<.v3ztUTII(a>} >35ZєWh +BzO|qlLn_O?IpFLc|vefd| LfZWk12e\jgKzr5"J4Zst(p(P mm͖76 ,ĥ z%bļv Kd) LKDpŷXpFEK8HjWniS+ B fpL䑭jMsּfOvNzJ A;@Z()씙 \1睼t_}zP!iyRM2:O€}p0+=ӿw[x[h Jr5_.n==/$ 0X|Hr,2""|x+fP-pu]W+E<~wOkyG$W,X\o*Ah)j|Oe.ꃨvGJg7Qֆx_]f 7 2Z> x> v@kXEnhՎwcki55\(P>S\f=Foebh͠.G$^_eTx  lL_bԳ?V0ld7\}UrmZUo{5b`MQ8Լl\z͎(ky1TRn Q q_Nc i7=*U1Ƚ|hK+i6d*DLF Qmiұvwk) t\B,7g3 Eh;ރ[yv> !\ٝaVC}s=ħ &<垿(JAzw;7/rkPj YϭVmNО!O:^"3XBj4΁5p u`Nk\*|ZJ8Ӻp0gO֭tLL}']C= Npd=wyb kAp:t#F=mTR j@+6b0WtJ/1Ǵ~6;I)ttpxr qnCճ$MnngVQM.⼶ɓIʳ'Om;Hz?W}Zm[4+s^[RSse%p;SPSI8~ZhSB8 #ƨ"c5R隷? FH|کI^uQq (6x qWq\d?pEu?P/5KRQ VH0C.0^I/Y>x)thR*/ lu*56dr-WP^m_97/u0 F9nމdπ[$E8ZIrNf7kոO]s@PISR [hj ״\fi\bMJn`wq͂k qˢ>cԦBJ*a>Yj6 fsl&m1z;| OH:LKNwfBz5Ls`a3+ Gz7h!h:_y!1 Poe4F: Ucy%bw!M)UB5ssr# ibR ZUEܜ׼W6$ Dkfac]ʵL낰ZXXP4ψPƔ){N"\h  }* J'Hpe `S115J' #)y3Zt6T+̌7HF()0D̚ qK (6:4gl>cl3VN21cB*.ރX#<'  9:ĄQ&-iks#f[\E/BR\i1x]5}o+H+5o-]{h|u_hV?BZzoJ?/W <lQ*((ҢJ0ni7u1o0/λ%|3HpSۛ m惮ٰPXkp"Ż3o`pj+xCU]i;ȾMѧĵP 򿧚E %lû_UU>rAE)c8}td]bQgv Cٙr@r}ލE*0i̛)F!I;(f6$D)Z$&܍頖of,O/ 39g'؂٪\iZK#\N,v,Jx+ 0 1E)QڢL%@ OV.2ˎX`3vfJ&9)T,#HL@IelD>S졎˗~k$R/֋l%^9ӱ7K`fP5y(/swQt4Y(dCf\x aԼl_7iő5Gʱ58FG '`pꗬLi//_Z#5~<PdV;L%Ԍ Rx ZFZ 72al -ȩĔp~nUlLiA{8,[ò//L]j}&}-G|teCxM]}i"O4VF6m~z1 XfN|y]xt|T^iǿO9 }$]았y'Y.gʬIKGG~/~V2]%ZG%A 8{%1> ~Gȡ#v0J}&%Itq9ݭкaU\7)%u,EܠE`Em% Hf΁ 5j&s@H\mtx|x\R(㢐@2S-V˓qVf<^Hmqu)-LXfYu<^Xɴ&FIJ̄+^ ڽi7;Kwa\zE!eӁ$w.k2UوG7#?̓\q<"=\Mf_Ox 暂^vczEW%^/޽N|塶*֘~ ;j!<öJ~DDT?zHIRg[4j jU^gR .e/qo49Ir6r4 eL>^_}͇zNYOVRy仝l_ʽw`JcD"]̃asē~'"]N .OiWV"Amʒ)35~ʸ$uz)OӕkcL Gqڀm&mfriOz;JY_#:!db 9ྙx)3@H}l=ۙnG8W{k^Hְ;`:0c|a:[8vw0~\{jxC3_`d&D?O;?me|(CVa2jBE? 
Qjm(R`Z6RӼ4QÀ'/4c7kg躾ɢD1VP6ӳ~=_ke{i*kgMq#z jJyk9ZTRs=1+K@s^ jb)@U7<;@Aи_Yvv)@sJ3~ ),OFPH Q :XTE"[[TQVJiVh'{U8ύ9x4Q+r#A;iN8DdA{@G29;\)jm!P{ΈYX^ vȰ9\uɇ[7>IPQj'(*nK wv/h߯3j_~;Bo$n&0쯏'0?=8 / qT()vEUd웃k7`:ќюtg  Vg߭7 +eNb&w[oӲen9]By޶Z;ҜI>qk1`ACl1];`}Dbe2;7ojt~ޅRqZ4ĺ7_M '|XI< -׽ >9:k 3턿^v!nWFw;JGCڬ| ZYqmlF;z@3%#4ktnḵvzS 餗D(i;rxr5xcY5; nYAxn*fT֬ '=s%0Jp$8=K=UXOGQ:jjƥMZx@EłIs'f+HGI+jd%G7)nHd`6h:Xr%gГC) %6ɣLojZE}\_^bYzFPRY3[rV\7K͘7y 3G:(=N*lݬ9{!PHz'23l=ӤRTqQ8AǏ7VYGȑBDu8O 7tt).)kdTX,0ϝjA484 B.2W3'S].a bVF2 92xd't+G֣ADXd`Q:89+Nb/O㭮X}8eȡ쏶A׀?a|ȋV`)ɋh+g= 2 j$Cza+wiQJJVgDYr 9@ɛ<ӠaNdVv[yBvwF_]04N's'0oOZʴW7S݁`bW.ێT a&۫:C^v.7u]Ӣ0zh.:V_ARPyKk@4p,׏zfTdl5El 13;lv{+ Y# Xhdǰk=iI#kTAmcW<2w+m#I"3`z hILҒ[ERI:E[nbeE~qGfģضuk7Y00l(_+D=ŵo$1D" T!W؀>QC Ax4J+shr6 CPVEE\R bXmIT|8qhe|_?^X&k Ӂ>e# w9Y@b⃾hW4'z.?-zS@Y=MvLҠc-1~$~4o}~[Pv{d2x%w׋܋%y={T(u#uf&pB\=g{4Dmatn1<%fƾa10@"Vu#-Ykla{)Icpu.ם6!,|&AOX^_.zУ8P[4`bjD2$}̷VF-/9APږ=B}2}dzS4kH3 (SI 9=Zߺl>9` 6H5B5~Pxdǃwqirhw ̂$IO$uՔVnf;Nm{:m,#Cd6M) 6:,{=)a ]?j]85M ̎8),t;+ko6mLp^PX)\1Z2vLBOBфGFi,-Z&AqL:N:!1c(6ZZf틆͈n֏vAo*EilD$Fcvѹ:̫I>ˬ>`u݉~H EOFkSʘ{jւ6^;^riu*K%\VcJb Ԛ[B6FYvl {!Āt -x1&+2 !ZI=snA?w-eZqk6|XI5ғt wѠ7hLy6 ْY>>)>d[ʌ-=5d'l foKru1A*[y<0>0P;aZP[x>jA6|ѝN"9 JFC\#?୒!u^~}QH H_~~=9V0c,¤'#>xG*ecyeG'd}r0Vo p&JFJ4pҹL7$s)=ʞC}RxBכ &8-~+d-\׷k@F[2_ƻm[oqrw$ Z¹fSF!] FYr>>1|IN2^x~rx_= mFO25i/<2cE>2ɐVjr/zсp :׃̚d[[BC{*K z׻ם6AY-r@6#@>/6!9I#xU~}N)*RX$jp'N{Jd*e)CIևY6XxRܸmpDș1E4(>I"TK>8W؀4x=#S b8kancb(=2#V0ё3mw~XIzu%G9$祚|}ㅿ÷ Q| -성 ڒBJ]=\V OyE4G gay;* 7v8h>݌k<\?BJ^ f;8;*3׿Q\6HFLH6Fi1Ԉi}MqL76x)6]/[o+k0 c`4t#crprVm@Y}by?[]~"rUǥv0XJ2_XCnyz  o hIi@n.{Q&j2RTZӚj,4dL BqcP. PՀbf[A*fo-$ $hL (e{xj˜ S#Z+E&D0~Lį[flN%})a z@\4 o O _9tkDp\i^w{x߻Wѵ2nwFo VJns.~bQE:  z(Z=(̱^ph>k%N=pjtlt2q2`~ܞRWAF ;×=C[FnWI--v``#Ǡh%Si(>]jİ0X6h!$| v D:"\1Ю Z0 asOv՜Bq1-gnv:Krsysy*نSJE!xx@X̮2D!_gl;eBe"MYɿ+j~]#^)Ll='ֻzݓ rˉRrQsIZڤj,&{L4@C Xj#okm^i6vo.Z>wwS_zꔣM@X;ğgy*Ο9 :8*zh~Mqo \h4lUΤHI<\]w=I I JGUH5[|+U!og>L9~*S81V)W8aȌJ03Ed;bB樬 tP߅zU.ޝ2pPmTc8jozpBC *>\y#ȿY@jwAn? XUD{ٞw3Jc!c<[(OlJ9i0aꔠS+m{N3ZU0ud1e}tQ@|TG'cr#,>ʳBgŻTlޱCU"=f,<ң[C/:XSDtWڃK*[atBQNn,|b0q/)rԭuDp)c糋=Q.MEj{Q=,g|=N2H Mz q-yӤ`jPS,y<h9\[Sf r`IC@ޠ ɠx,Q͜ <6XcעB\wÕv*sjKE*L5% m3P7qq,B tDZ 9w\w\;wXQeyl*4* #NQ^`\TN? KUjK`SSՔ A1~ IS = sN"w^w\׈~QU@.Ӵ)\WYg{HW#g@겁͢%]m0Y$!(Vm<6f„Q$bg~U]]rqD\Zvu>UoX;\s1RU;p"@43$ (D9X)L$FX)113(/ksС۝n$9&=Z$ޜNXOR`Wbj*"6ȳw;ardaӃߐ&^Ws=/'ߏYÜ]&EtM@P n9}燝7gCϼ=ݣyß?zp`κϽ^x{O}[?}m?.Nnv7m?eq}j$[hxoݹ`y! 
noL*{ϥoL,wOL{yyf87O|0Pz]|Z<~'qfsMﴹt6a6ۊO {U4jNœR9~~-쓿iy}؂e|w5'fuUȟW|1mca|}5Q#_mw{c{ 3k#%Gr:}yx_z2sq p&q}ٻ~AIwXrǗ9ޞd8m9'jˎ]ovs+䫎~5.D:fs[zT=H5B6vd]ڷLفt?Pڿ}'_ԇg0Wq㣄x_.} u7TputⓃ Ω"3V'0W΃ZHIh!%BJҿrz3' q'zXk6FfLv0b$g2 rHQݫYot?fmÍEn- ߭Q.9黵[c}9x[~Z+ÌYH [5 vFTc0Xa0F iKbhJf2b0kp]g֩5u kp]{w"w9$z]NAq~pb[9lՓ7zV($B+a {hPQkp*,A)NSy 8oy<{J VNTaja6j9y888qPi@ق`p%qYwPiꅠZFr: ЀBq[l`+ 3&Rn &܏ ˠo}Dž e+Z"qڶ}ߺwUhۃo_nxz`~; ?ۇ{H[.cNgzfv/@+ڀa䵹_ k.|5 Ԓ PKTT)Ȗ{Z#]6W5r6~i~U0S<*D<:J~yRÒ%Ò)vZq(c2CZk9E們۲/f(Z>97pu =`h"S̡))fOUii@ҞІ : ٬C$& Č1T#ǐ&`+"q,]F~گ^Zگ]kv׮ ygwH'Y$mAeN41•QH!d!1y7vOv`BLSQpzG#ʅXݽ;&A&GWe`-Uw/rǷj-d\e}vub25AcT-"TI3ͬ 'Zd$1V1894Q10Mtr(bP9BzA;F!Fr5S b1䔣H$ ۫_S"I+\[3[ZVw-%s[u)[1bn?B眞!,xڣNNX5DSYsD&0bR0#OiB .BBu<.Ƞ)A`Eg^y: ,\ (YZƋ4'|.I&(a`'QxO"2՜X'챠╰\{BOH9X2p*eiI\cg!/0-%3EqKE)J52VKjY*%]`MS N *RgU*Y,rAy(jkFR"|6lJ?+ǜ1,F!B_6JYrjGS&"4ñ :g#$'R9u =@| pE򢎦\mHIBt'VРuQ'uQ'zeVЬgOj\ޟ 鄇4hS2 iDH%I^ Iej0ʦR@"bTR\%ASn& b5:g72lƎg!n86f a(y=g2`3Vrlc₡{,`gwi+tDy %.C֡ %u&FLFqq[)r25 ޓFաU()Z3Y,΂()Pa~Jc&<S)ؤ= cJrB9,T2]wV1]#Q,<b@xuz.gWߞH4uλ.-M̈́J)4'M]{n=۝39?EVRsWe5D+o*Q٩PeQ<$4Nb8G,ڸ+ [3hLBS9!k.DFI֍q}~,i777cBX5cN2S䴒&*B=p*̽!e"uĦs C;I"T%.7Z~⡟ͥA_/ܜ;d{:>Z٩d  $Zk.ELhwy/ ev&meL/`/(ㅳ+C,0Nuʱ-^pXP,gO3snG`"E9?eR -*^C/̟\kxC #yuÀ)yXmG7VE|k95E|h_˧| 1ri&hQX=#<XDX|BE;=eT9` In:F T"8±;Ck6O>ckѕ}eⲷ`zMLVwk͆Yg/wLq;e7 s$퐤mw5 u!1zG/ڐwL;I=֌\ޘx^$BID2M,ޚb7:{仗g~z2X3n0LXbh@*l" yc=scOLs9)JYI#> NU:`72;y΅*m[7/ n? sDtW@³~NntmB=BцܷĀ~[>@r%ƹV#/ fSg"+L|LZGVN@?K,lm7^LY)UH(X:7-8Z. s?8%:B+1"!5OByoԌ[a\-28%* /B/ S3v)9Ϸ *}U/sW1L?-&vЩBUdH`GK0r2a$^&NJ';B?VAZyC)J<^4~>)PAC0KKnKUcPVo4_pg @(ڥ(ZRmJ*qF W%J(!yg(bd Ypm-*p  &j]ko+?́"~7VӠnn6xȒ*Ɏ/?,ɲnd(R횻K3$9rj$CLΔgi*\L%4\xrpoKE*^`'h)٘CPAk`ZaF Xd5b^XN aXH0V!ǘ['A'\00ܜ~9 'MM0|BkSu{ܫeM'_ɲ"P#c 2j>OčKk1zmd7ZkK5\S]Fe{aƬq}ީg\.wxptFvӆ2?[zO,j|;:5EaMK[~fAeJm᝹4M_=[xL٘K߲`7T? sY{߷ʻ!jw{_aQz`~޵aT o)TcqlcgTo.>t[gG޷[v!~oԻ]16{F5^b>ŃW7^X>r1n co~P쐏_P)x`F=GѰX\_Ǐ9RH׸y<~:tⓃryCVpFo'SCuo2NzW% P[3d!u2-pZY,D J3 #nӑYo%͒:;2`Ϗ咐gw~EoYQYo=n' = c yD)FP"(2N %^[As8\jp[n"+Mⵉ׾$^kv;:kM6 d,]I/k5&q sfi 0jT?e* dj@9\F*;iym0‡XYi-B+N cB#ˑ`!ɱM֮D2&VX-_$VXmbi6[TWZf5f_]>2=` f[M&ZÀLvD<>;3#SI`I6% Z k}wQJϼQZF2Wfu I- >Y$! ł/b'|؏%+(0 SwmJV@\[[ `l>vbmS&(ޠ-wD׺1X3Wk ;!kITk7^x @FW"XfNQK5 SSg)@LS~ʠ=? f>6<)b*f"@ Q:8uNl1< &2fL*8^0VOIIVU4p?v5a5B{(qy*y#ZQ=Yop笿y0NN1"b{VOWS]"ǻL#pJ^ߜ[,m{CaS; vnLn0EŦ{ЌmVDX8adBsĝQXAAe;k 7 L`Qr#=H1NB3HOƃh"$كkmҒxb?I53%` 1X0 1Y 3"uu3='%7DAROxjºO; geՄ 3 YDjŠVfłz+S8۝ʏy^ 1ĭVъ˥4w(plbo+@Qyp0)eQ:z+jVmtY;L7…< ŭ9p`(JqTn}[;h֨ӇAXXT|m4UE[ 3<+lX݈aQĕ&<MUc8p(q5$ +G"z0EITҲ3HI "G"n>%ij8! )װS"#,-0n:@t1iz\,YlW8]D"P t)x iz|c{u;e" ƃ(K㳘pcD:vDa"izt9i9L7qc;D"ƃH9I"Og2=m 3P۝m{ZV/{AHPqκ[3D7(e(x1B$@`{c_!-`':h&{oLC! 
*p -D&ndޛ`V>ґቧet8ۯ2)_ZeXnR#ox2 bme}0Q~7Z`șcB~;E&`ӻ1~ܹks5,kG0cZ)IVoVGꫥlY@aY=dwo?va'T0koE85HbT/#c#PaiJ3bP]kl{H[˄>'n*hqc,Lph4GaJ GFU&1F %F~1 }V%'yI)}.RJ'I)}RJ4P^sN)>Ԝ>>#^fD;iee dlʴOgFCr̃PI PI 6I}Ve?%i9f1_Yьxv%o|7ލ܁]ynܴqDSZiܜ>[1x(ݍ *fv+WV='  [qv*ŭd6h珿ύ;/3.*ni/A=j,"Z (_h'=W!1 OΖ=Xgeܨ su>s:}k,fhu(Ϲ;Kq?ST*H,r4$I|8 ,aǙ`4lB ڮk&=qUܿ[J {c*[ %~@2o]Od(EC1 *HD$\_#W^U]W{j=RZ`Ɂ\XtQIIm™;#wO{tQs(̫܏nL }K˥Bn-&ҩe3 ǭc3Itm|\VX p^_3WD|O}x}}cEe%˶)Dmqp`4j8TI;*͠`%3vX/5ӯJJߞT.Qs'X͉^htۭӡ;CN=z(fQ¥ j/ 1 S s0ȄX&.؅:-bRB]H )vay 5NL `&BC1].)]Z# O0$?taGL C m~ŠlZTrۄsP~( a,01"Na-%@"t1FFS; ZH`O"$O=I7DpMwy2fDS͙<;]T\sg$0L3cl1Btw& RB?ǝ*$ %kLykʅz;v>v:)-6 jfPkx7Tq *x08h $Pb@/ib@%Pb@d@cJ_lw*ah,,Hgk!`LpoLv#3##SIpI6&|N?<,zBf&AsX|kg+tچƲfF$6C؞Mxw t[+c3NBZpֱ9}+-oD3goUd)|b~>3:k{y>[ꍢ ጦXbDW>spS3לx~?4|t)_:YB1P\{ER6ͩ"err%&H))ۧJS%,ߋ& ڹ&DP] R@ `aƻ Qz{js9OGyxG?.?<=@FƸztxzR5*r5 _-߿H_{%mw3: o;)ν>==o?H fX{Ϙ|Ϙ`y=+]D* SiP6Qɋ=7SWs 4x_ ϟk|ٞ1.MǛ&:u!zē%WKZ^1J@@mLY-:YZC Ǟ/X m)ki)b̤h۴]#ZwU|O_}`e_(t5Y%nk4/cZn_J- r%uCߦe??F`zewx^]i󔟷;/nKq8BP|x-:W>@/jrrztbn{B\P\6{ Q^*Nzt Cnrf&mHs&__qpj|p/.X8P|t4]+|O۵/]䣺onQ_{Q4~lidv=/Z!_dQz~zzz>b.m~~w?Ӡ;ʕɱLR Ü$SV9XHLޕ-D!'vK/^+oWO@}IܣJdX(RcZ pφ[ГIĴs&xqFχ-f lm% I lUpBCJ.;o)Lp/V^az"]l罃/PoOͅdՉ>M7^2O~dzۘ c4mTt.RF_IzR1w ^2r 75&]BA9HPXX3uSm̝#gtގϏOӈaud`O_t~2*wࢣo~e!Z (_WVً6룱\*T'$bztѵp3rًSq_Q" A(Og::>:{zpm//%sІHBDxr~x}|c s<2Jhb<&(Vr:?\KN m"]gzCƈ/-slf2Jɫ 5: _Wv%R\&5s Z;)S㤁{ >`ûg?6 w6SbEgZҴG'Y +<;ޣi|!nn{4TScy}E+vڣ^Q ^x<ڕv'%>nH+£ju9`Go7yɟu*}S8(lG359Vē4\ǟ5p)j %zN-A#d զ &^l@2Ȃwg({ctxhڗi +<X=6S#"L`3E0j5ga35mU)OF0Ǔ [2t!z\OvݩAUBwڵ'ɱZr!%?`޼xݒ3k; 6GZFv#s,Њrf5lދAt,(8 J׃T#PBS ţkٶ3PXwf{srގJ)xqbWْѤ6m6R<*@^ =c:r!FT3;vhR`'z!Pż縴!ޝlʧAYQZ.Jp4~Z6P9tmHK !vcO'rCWSm/!97 ߅cWok0ȼ/F5nC5`ԉ;zE^k |P|+ e ,pmj-XS zo@*6&3I721zU̪ںi5?A@ 3\k2(#"Z9AC 3(Yb.%"Q)H2z(]WkҒj:5uKYūfQjF^v-F]h׏vTb-Q]S0beU Z"SːzH\k T M"^(.)hTveozs1O)N}:0N`ȥE GL)g$!{#+bUp9~WU^~{sv'E]s0JЀ7i>*#vdW.w xr$T\]U-M+_w 哊N\Ql@:\j!ٲE~SG'S6SbE˴ Ex\z5s(e?{r `ti[PAL=КG.n07fp` qJ Y cFBZALQZ}É.=7rr0W&PȂҪPy"y$DގF.TbN$ ¢'T$ 2Z/u;9hq*xVcOM4Ŏ|SWLQ jEBMeH:ʛmb\`sJ+[.e&hg^dN=N/:qj ԒiY4KpqYFD^אg+q(ɱ4v0QŨԘ&ixO^ BcVZb'(NBE?jkJ&~ttIm_mT~S7PRi:ؖe"Q~B}v$3ʟitK<-vTʝ6VZ;~1 lCS*奵89:*h^h5x8QoJNTjTEDke7tw>Vf:/zIu]\/CKkAfB(7٪p+5"HN 4HֳxU95j[3.*JjTowVֻ{xc3}o,_dva9OYBjyxY2hct{+Ft h;.b77DFѷ[@cHL*f{oOR]|Uo,cధ }$?DwP2v?3_I ϬeOƒOētKÂJ ;a 06n3CsJTy *I8 ƱqGs30f='̬MSS HѪӶoF\qK@J} aOSђs!flJKw%ۑzT\@)iE b/(S×( C% k:i0l,Q"XQJF4ir*)en#( ̈́4'@wtuH dHȈ'0ɔS@V5J ^C VD/i%cb ʔZ|mLiT3DebR2_ht}(|d52)/=V,rJ-VbPl#| p mY^gDAb~5 Od,7>Voa 2 i(穀2#zCcB!(KPh`y P#Q F6rZ6, ӃB 5FOj \JGV2@! wnYpZ3ldS. byyz$*]%lX nI[!oɍ lt(ՒA?AУ-DEv4m}&f^Б  (x&iŸ-(:OqՆے*#vF{+ij,ƍZc`@=@4VWawABihiB Vu4lwGh%-V@vF{jxTG 5;>'>ص5K%Q׉ݳ5#m7ÚU(-XLd2&`2 ؒۀ!I m~(46#՚ CvfXФ7PќpHCp/Yg6n^(fa7] Ad$x4k6lSD[:WG#-]XN{:@.Vod`!9@L=k= i1\w#~cYTzSMKׯ_?1kO ~V?oyi}Ɍ a(ʴ&9-)AaJc9"9]&K÷F^zJa#hS XXVPnK8 J L3 TT6ɩ}^0Gi}FJŠo \*5^aݠ ցw^=v8v$Gtfs]h2>m-AAlu NS3zA\v.)q˭A@j4 PЅ4nG"maBDyˢ8n\37eP a4ʲ'u!A4O@舌Uׁ,ak;؉.vtAv Np%Ӂ2ňI22hPL1έ߲$)J 1Z 9)і:/^?^r7Yy>^ J`QJwk>PVi἟./rUyw&cyR4)1k/wQ`br4ͮ>;X k? 
)џBǠkt㡢S4p<5S] )tMBƪ)0a BW=uIgwڛ tNle'dj >^kw׫,<Ӳt2|\v}ZNU n ԊkA>_wNGQ+ 7iU:2B>sfaғߩ :uA侣u;2h{~H߾u!9D30% %'oY7Insn`JĔ "Rm$;B5X%f;+:"?\bY¡Gwda߼msVjGY_L!dGj˸'[6ܞBh˃6~W;G4_ S-F w薡!)K$[֍Z9[.@']VYv-!G#z֭;iLʙeSZQ֌%uZw|퍩jV|}~ݼA!Hss=/V'Xұ*ڭ~[]U;g ^Y[+w;6mv׷k@;Y!hOMҠVSf$*[YˬW4;l*Hp)ǫhoMZW4=1TEsZ<1ˬhOhn/?~"+\u, :$s$Ǩ ZQ ,W]w#rnZ/eNSzEyŽXl.T a8H~mpi5ߦLoIt[1..3D o)3>$ 6 g!F}K.+݋RZ =P#2ScX($iTsiƨ4NcfNcjjt(tj;e,58ry|$7jm-p*6=TQx%g-KblWg5ӕm6Y[ز&^hװ HN6ymD$!;#^I-[օ0vmF+`.lFh稖&\TF6xa$9FfCT1\gu&s{L"{VZ0Q,j Ox(@!)fB(,q{R1 q];3H2RGi5whRt|AQJKAɥPj,-C01FYsL{۬rg3- 8f^ъ0ItpL/qIۮ`,a]73łj.X rZn\k l?Ni!u6M%П~"Xrt-T$k9#jɐ"tP7H$$4ɣdȽDE2^"k KIψ{ɠ"U%{{%Z#l}hf'$$NfH,s].%fA2>]EvljZMͱzQIɺ'Yv7+K"̯o6s}{N_7߻uBaΞRVSXMڷ{N9ՄMqzzYⰌ>9@ni ZCm![Vӓ~zʋA3ԌhD5؈r_bV}պfoe{%jiߺtg!p7 K —^ W=wjm]Uc@FDY˷&/j%e8y^Gdi_?e^sJj.7g,+-w!cuca(uk1)';Zӟusے/>~ _ QOiX9ec~󒜈w{仱#9D_bRJɁw/g? G{>'#%~{Zȏ:[.]OTD?_$sIJo}De27k%(U_{ƣ n{N%Ie(VgbOU*PRw:~:~:~:~^'l^(YL%a1L)lݢBRNR I2(1޵-#W^@zg_`r;Rl|@@;U;Z4 J2/Ͽfn6m%C; `iܾ~sS-zOeQv9w;h{N#}4="ewvs8uNDB%*T %$/+ʔALIhdI CL1#ztlۖb5}1(?n.̨\'ߖHdT4}]׫@z:CXkPF㱰 <ɬKuXehVV eV8"3W~,ݽY'jZ6Q$%H[YPnO椚AYtyp%ξ@1=UXzirUh-h&ִҤ*ҒDqԠ4`s}\[û\0K&)B)4:4vpbw)Fh3cfJX]zktRFσO Ye:^4bϝC_x-7&?!6_Ef`2ǔ(j\:,COAAͥ1k~vU}?}WPԪ[xsG㚑l٥|#>#B*Û ż솿Ƕ%M5}ʎV.gpx Y.:1LR"ꆶIfq ka}ي@Wl̀2l,'ې )Ŀ>~[m>!}z%`[3KD35A>'(sEc|xհt=@:#EގR}F衟gw6!'_w50d1/K^E<| s=6qv[iOvXxzTe7qI n1=(~Џa|l|[ȏ% #a{*]`Y_q&}]+5pm,&[i2O) YtEb&i`X:.^:CYtqN D'ZX'Bd}q(P)M__!Ob"T?i#c ;3,v;.߉L bv:B@~ÉYbBJ_=;f[{O#zu;ȋftP6-./Auu{0)Bѥꅱg wMӀ[(^ܦӢbLUf.awP PYNXl]I׻0Qp_xC;Gʛ 's1p@o )aǀQ%z-v!{ڴ@)ӢBY LhG.)&-XSdf[?}^`ju*nW_ˊZ.,Ţf?691=4B 5x“|=N={Mx Oyuv{ʲ?W'$n -LGx~ǿ_r 58SJ(=_=y%k!6*=^bwp0BSHs%6ZH(9  '\"xS\hSAs/*A .ω{pD]:[d'n8'BR/g]'&) ܕw~a4E!)h/&cX&O GaW-;Hƒf#F陵8B[Un4dJ<eBy')K fZyX \< }̗E߄jg;tz'OEEYs2xr͆fx*xl9P*M^fImn9?#Py6P\&;gv@7`VW5H!^2APnͭLW,){f!nB*#2P (c  = sZ4;_tna \ y_>ּ&S YP7=x6;C>81zC^ZI1t9qFLʠh?XRZ8d5=5m kb@>4B#ࡱh^` HkvFݺ\ӣqkQRRK.aJBmFEK\Z׵njfS(̳,QB'( !̂kw\+(D3k]Ȝ;7;QP}'ZxQ{kWy8]gEd 3b2C(K-`{@9TRd`))K+\P5.X. X}X,u_!:?nJ,lnE6||)xT9#9Y34R߳/^nC(V{Zhᾙnen}V`҄L,KZ1(YxZeS e6]zŏ,`Ďǝ>^1r0hF>7!kvʿoҮv]z]7WZ7YkӀLV 8I#5z ufEHEa2,Yoww/_; R:|wM}]bl] fӞmWm˳MnH.iQG>&sH/]p 1wfR?揿^[[Pd^j#}ux.fwkF!; {Qp66xbKƚ l_+ &кc˲Z#ڽƍ'GH>؎:=|O;uɑ x!$S&EQD`KJx= B%A$lG. r[ΥN$VLOu~>:an'JZ.AFGPkVUNdrΩp-&c;núu;.߿~N8퇖9&;4єݓk1sLXђ:yXͦCcXģ44 jA\t{~Y}DN'"ִc&W|O1W# iɤ_ >BYs?BOGg:FJk5pLE=S#teۍ}蚤x&:~jڧGnG ?q!HSt"(S/Ƚ[?NT1y`L- \9~X؛c1`Ǯd--ZHi>$fzdx{1%F\7 ܉7S.04unhu9ܕ&łJ'hJF\mMʹ&ϙ sdhep]\4(fVcma:(6bRHRGq5j>NU[N[Eď>轴jIrf¶4 &f<54qe#2$ͪ"YbIŸR Y2iXj7ewԞDM;'„ThQ$#l%a6B:92dQcޝ (8Y$Zps0P{i#PLfUۼ|~PT }p)b"s[D%TZ[ Au.? t#JFO}"ѭk#.x1 n\N 9Ӏ/YrvFDtf\S)cMkIBHOgC\5sdEzuQ糄Hg{y^IhA7{|Cl!A Bw3KNlZC8 ]qFڊi̼OoÛgo?<׭Xo޻f]es0}vC0.ie(8<܄| 6!B|]z!eۭWۊQN oLj&F:\zw,(] {敛dPqG&wbW`* =2^4pUH0+7}o*#M/<-R=܃_P3+tqGd3K I7!׭#ldv14Di:jzgM;sf;+D^o`,k9 nw](@9`l yJ7[A4٫͞A-C˻P^ f{q7/J7l$w:'BЫ{V$>p)1ΰo:k%L_Jj:]^~׻a_NP^ݟ9Q#8bj9nmS( 8DJ> 2"Yk# tI.3&A%N0DZH@'Q[,ݩS7 XU<?ք5QY> ٪?5]YV~b~NJiЇu߆,&enJU+++i#3/KmυTVZBiz‚,D@,~*0?;~oG7;vxHՒC9>yTПw3FYr1VI{xҀ$f%$Qq89??<i*b柚e;xuwSWD;9`^\[Y4) _"zԂ<]ʿqXIuFQ;y$zQ3FsFmNydNÕ9'9>goN5MMNIOxlrΙ=HSƟ95pBfKl[Ng=$V-N|mgԶMĩp"dE*)u)^pssir*YN B 3xH95 \b(Q^ͩ9\ɗzjᑕUF  1,p+y)RNb.w{!vXUNT\^"1p9"Y z -J pT$3QSXi/daݽ (0,+ QObu~%&*s-r -$i# k)'QϏFɋOQtE-`ckIm~F! ! 
%tQ v E/FJfrJ Ш9bنFUF,;{C4*6a*J⒂5M OZL6%$Z;u"f3`+Aj/IILUNT 㒉9.k [MpvoMbl5-(Zr02E#m|{7(|dh}g-{d@\]0ȦURqq|Źw=0tݣo/PY?~&}Jg־|T{T?"<'?VItdk ANK[6W_ť|*/@}APYJVWGR˝*|m!WWy<,RlC<a;Jki|4{ԓ$ P*vJZu4houvwxF',Fˆkۭ5Z^{_(F'HK&ñ#κA6Z_]kȀ8b;FoPKγVpKK3lW`iTh7S[>uW^ %&y:D+Y*/hKW|UYtvnϠQ& 4Sڳw~7%26u2i?8HOvJTڔ_-ᒣNuEl7M[Tw)|<Ц>_˛Mz֍8}4Fޡ}ƋvgTC>ίdZX@eH¢n[tmb3n.s?.IqXIT\ߝݻOTSEi1u&v.>QE0Bʊ?ͽѻ<$pWCu4܃:PYspY;/|8~42/=x<ܡm5~qӅev÷<9|>ؖ.S\TZ Sme[Zg@Z"@gBޑ}j{=jp=1nǺG?Iȗ\b$ty!S6Mj \uz"3xRTw. e2-G^Y󌧮(#mW#Oj|X5O^5a;v}ɀ`M~g)v~e|0R wCrtWY!;<':6o1A'>x|ɸ2a@u<6qK:>KR*9^,Tw™clr c̤bf L%JzB^J|G=8y3G6/ )cSIOQ ;!Ań9 pgͅs_p 4j"=1vM߀`I֟]B#oZic^fm]bІفm+WĊRWuk*iH7>u*y]je^Cu ^KPab;eM *dk+S VTCN%d|UԎ V110rO۰{c`"!cYo+炳Z4ʼUhS٨[\mȲ㨕3hGnJAg,߾T`/c~"c =ϸN$s$yH hlfaxvG Bug3aǧ!u?iL$oG`ff-׹dևn4)ݑ)pn>5=޶<#)E>)ӳ_m9H- dpLP5'}3 l%& '. '}}y`u=RuKJipûfJj]w}d¥iizoi:YkfA;ɾ(8*Yߦo~O nlFbnZ+Q1&ZжƀSC;PBÎcu>߯ۋ 19=wR_o }{('PE"^_VgC>;h}fGQ frq(dJ/"hg4:z]b'ƿo~qnCb-\,r;@NI<ԱZ#I4}BS۪BFV/%|@~J`9::XO~zlմ"CT iA0Ymt۱!TYK'0 iB3l|$&xљ%"򘜨az u> G68E\PQhj)@U9iaשGyrHf݈7x>ƋSY@&,·, 5(`p=h%5 UdHT1ָ'r65 [. `lgr<ˌPc"}*K2̂*H 7>RMZItUC#X"~?G g-~j8Zm n@[NCzHKTR_zm!K$RM{pI8Q[D2+X7K1)g. $ks͐?A@d7A$ ޛ& P2 Űo~h 0:q> 1U؉ݾAbb߇j҉}#2yv8 ;.jsuu>o!b~닸ʑOEEp$Sz xZܑ]\:&4\"ݢɭ8q]Qί$1mIZpCq$ljWY:96-.tbmk}ʵHA)r ijɱCLF/:2mӭ*|\5NS"t]Q+vn .&(jjTPfkED]) mYb_]XW=+Z^Ӹ!P}sXa;ʁm0[ns[hW@:ejmSEWI@ *6 GydjZ&ÐC[{!Y|{3;62S^ɻPyi齵&׳S߷5ŧ_}5~7zFѣ˝BMUĵ! s."%zeź}hVK/4w]$5h%TH@^C 5d_vYc.Ւ|)\vq jd@ XA%p`O`d=ci^uƲwLB'*098w:19;mWLϻmm*dpIVbr. r}wh<5}'Ϥev R*򳳓</i#×Ag~n3tzr,rc+Dsa6M?Ÿ٧cOR ;;kCW5ɱ]^ou΅>kRP,t%GCtv91JZF Xg/i OꔳI&!*[m$\ب)@`nv9ikS)TݷW'V 7Ȱ6mCp ؕ|NZqҰdn%^:#.‘mIbi4J6S#6ɾ+ԈFoPp8$3m+J+JRmҭDNl$;5O3Ml2Mg#3M)md}Zɚ1\-",n_Í_K N }i!R!m/%zY5i.:vN1(jM^90X^b }˭h%8Хk =&=tYkȜZ|.m\%BLIw*kc+mfqeTu{ 0yOĵ*Ŕb% U=!Bܚ:T&(sp2] -?BT4ݝLHɔ^Pm@Sj]d5 JQ(JauS zz}["b //JSq_H ~Li.2*c0?eP0cZf'~= nNpvw~tXVcZRNigKWؿ!KUb/z3Kw ,~r3B/Gfj&?I1FclT(.Z ~6erTJyԤb6}5zt4+^hR9Ba-J\Vd}՛oӿcH}p;}hG6a׶Պ+s2\7L9zz"`3amQ ^8ŴNb6+Zbfm#ԙBe&Njq&\q⧊c=a]c3Ll{?'UO?2zh]={m[z2H̓x b b`[.c,j U! Y!:-DUR[&żXr7~5ׇ:wR=pUtIy )<V/Uڐ',(4O6y"JѹTSي}Po H-I66媲l\Uk\ Jmhv s\ĶVkSw?_IAR)=@؏&VڞU!e{hu\M>0 L j"U, j_eA8Th|<-VP[Dz93%P*HwiNmQl]uņI\Ŧ8S*r,q-լo0~8̬ҎYSouuyETZmQ]{ DuGuW%{,CZn@Ԓq-P]b r%U93UYg&VZM*7#Y3IUn4-S^k2'+MhNj6U]R5(4,B \n .4"YZMӓְ U 6 jCBM(װpѰ5c Ұfs-}<6|BZA_B-(tL6/ vFNk1ŧRˡFtw9O@hw_{Z|]WTs'Bm(zɗցeD D~'|( i)Xe #l(0Q q ͚f"1LBtQ8M)W>$%77򲈚sGƝˊٓ'\v^‹x9srEk5 :y&|Ht^^>Z0Я\$CӴtf͓f4 TO)a7sT¨ny.Tjv?\+r iKD{Q6!nwhAwaBA^[ Jfx䌿Z!K(9b~jp֘ŗ 8?p]:RƒiQ2 +8*e(xցL;hi}8S  )bg+'8ӉսE=Eqh]@m\˰!Usb䕠wYE/aLe6hXdBZb%ѾyL0%7#K}i)ZWHD$ڛ!BOחDL +^\nB 6tr,g¿yvթNd)[͗Fd*0J cy.!$buW_^.aFǴv_!==Cj_gQ$B[%yU.Wᡁ[<6W+'FR%%h6hWAOWCtnUS{KHk"YػMs@/gLE"-ЩjRl)T3%"gb<*Je*e2zOt4i[Nmn B#QR_ !pժ5rK!xfq&`D#9mWօ#Sy{Pavk>ێĎ?|'9GϣέOLdnb7X?-OdC ?8Ȧv5S:w>I*ӺQ*yTEq+; 9 77xXvDDO8k~3718Y;燇e:61|w3kP`:ޅO\+Mh+KKƼgec̮ȵή'1'yl|E6vaƼ65{d]9&^f&_ӄ҃ҏUx>ho -~ǖ׺j '58Gaz_ ǮRC8 ňInOJŔǯ>?I'7)&O 1w#Q1؈Ok_@DhSaviϮE*}͹t&M̕TJ`p|oRMJ8'"()^y"ZA`4F9Zz&k&$Ee 쬳g p/_`IE׈6<Oi Gpٮ :|,-bU)HK_m8!%97$*1xEdXǢT%t%7tIY7ꮗMYkƸ&g@Ě(-8t(C (NQH9-{PA Fڈ,*\OcȜd ۚF΁\6pq˹[[ n:ĬȈQʒ H jt@Ceh㴼gd@諰*-v&v̋)סI! VpPTd܂Y(TAQIh1{J=l)zvm[ 3(D(T*ՠّh] n775@0lC#zj~vu\aKDaѩf#eq*/J Uz45|["\>"Hd(!u, p];]o!j4'[j?Td/Էb w>. 
]@O~Kѣo8Sr!=~ո-{1,GtHex?& Vr7xZؑ3e7![L&}"K9:,h M QϱٽES# y"Fx2lj-t筑6GTmY#mD US­'W.-dJҍlwݪ~\:|%xw3ǩ㣧t}Iѽz=VO>A5d'o?:<+z'!9fſ)r̓% a N9LH4(Tкs/ilr LO [[ %A:X$Tx6F8>@6с1uI!`=~{i*84UˎIdQnZFn4ydJik)Rj> 5Aמ2oq"νqeWr)gv°(A\ B!÷hM>wgi[ϑִ%/ֈME*QPh@M낯XrF8t?ZF9>~<}|,޽A1X|O!$J'1|G_|{,Ԓ-6J jѺB@jPXKE!KU'^RH֊ q,:4 _J䰉,AqMEi5p.|%z#io YhZ[Z٩ȐicjLɪ&ۀt.y a&sIOAҰVIH%f^9/@nj[w$w^[AndeYUe[~@,mt-{{ kVEfwx~nY7a ½@cűf,zY*oeW>}*4hqts+_89ݩ# GzG4dj: v!4664VM#FӃ3dHWpD|%/zDB9'$ByuWx>z1noTĠ|o]0arޙ/rM T]vd:~黇aۑ qg řdQ>9Uu4/1%*KB- >fgFY½_G?A_Djra(gѣE/#DD1;EIAc=12㔔p $?\ E?ʪ>kC w5 KWUèc߻+0yx/]I]'G/W vkp0CWK`z>'2O EBu!xM1ů'Iu>8"9sNvU53YX%ҧnfS=+ugs10W^!}SU9xrz=ԮVU ו<[sD?4,UY^[W\un{6MI0姇j QvoI/2wJAy-|YT2g2#LT_sMJPlfop=W ĥ#wpG0~wDv;܁Ò[M }Fdvo ٥#w' K Mޭ߾mjG:sw'ou:;U>8u OT7[kiI\T?+ zNJwn\KY7f'D=Np3+[_/bVE+SuƵ ^7VN佮6NWׯYz6BJ=gX8XBk㱿Ŵ̏u_O׊ڇFYs!-. BeALK̅&s^%Bm61lJ?޴wJE 7Hq3JR"p Y7jc>}l ? N!!^s1)?^h\En%-I3M9ul(uu [ TUڋk]_ j oUn g|,a AWfox_\p#=pYct^,@$DB K"X`#>J}~M+G' \$$ޡ HWZRdWjudS]TU9V;V' (^4΍a`fׯ k,oj!2Y e^u13^ĴY")o0Q'ߞ!O3_|/Z(Eu/A4f(s+%:hp:$PT/G6rnŽjOG Q*;K!t!'*!P@8*2S80c۾UOgrim\]+*9G]4v'g@H@.dVYG>v}Lk9*>Z"$c+) }1"RCzaW+xm}N WS˪wzE1#֏b/q3e]Vrx %*UֱR9UݭCz?0:HkZ/\oɣQ;:w`8 7FͪNb~*PypRLW׏jq};כO_Wk7\o}pឃWa괞ڞVr_Ъkg|S@mI8&6 MϛǻNMmڈ;^?|5JsM1Bۚ>mx]`Vva'b`Vg2o5N֌*:Sk%OjԝS~.;uHKԟ5yxqv{0^QgIMEOKXkviϘglv(CW_n?}ހīNoiZn?'cXЉRbW'sl=8-[U7[)v>?=8b<*i\":2+ 4rY~v PyMӧ.q[YE7۩¬Kv(V{?zQޮ_"a2AA=+F0dG,X$/(r_@o5z"`jP']ݸj^U#DAg59 ¸~z@ç0 ()1}7bQ)c#>bQdԛӃ{b ϨAwuXGശ gt@p'8 wzShIYQW5ЇtpUiJU=HVb2UîNǵj[}j4?&9zV~u+}TUwngVtX[kIgvqԐ`N@N`-bYƱ\BItLtgteqfxQDA2xGu:Pj(rP1o$t@5tѤ0݈Ps`q겄)aiGj'D5"&XV(5"8!W#s``H0H)B~{Yut 37UhOA#z|R.&+vRrrǻK̰,]W])~Rpe7ezZ{w_F ea+JT Ųol3bPBQ&Gn?fvHmD?,5Q{o~IP<=(KmP>,}%VУ[_ - ғ{z NO?v3Ƞmwm/L3Wgf.k7e[[+0\gޛo{i=e{s@!]%_K+ ?ɘ > !- ]]# dza/jGb}*8NPpVbB1H_6܊-?䤌DKZ`@U_gms?V4k.3! DO=EWs^j׸}V&¹%R%)e#)8Q+JaǤ/] KU(@bj]\! eH9`);$\6 Kc! OkqY3^\5|e&(QWN`.̎Ux[vєN\ 9{|ѐPV$l3/>ȭznփݥgJ[@f.29nWOggWd1ح @=O M$n89|e&)PH#E@Œ$3YP ʕ"48.̸֊cnԳGVbwKsF`Jd#Z5T63twWck56>^3d'R*AO/Ē$+}2ʌBp|* m$2efp!3Bt.Q@Jd ؚFoXkZkY( E &1+?;/;L3a@SD{p"Kٻ7ndW,9ŀlg@dl^fh&Ɠ uiFlղLln*Ūb勱"0to0b<0mt7ƒPT};4 j٢l`H 94⃧b90~3/dQ:8%J+vB!G\)d"KŒ; i,6cj9@ 8=ȧHd{A7:|))cLUŔ 6u0(U:ƟtACGTyدO*aQ @1u:OgŞIpNЂ9 e ¬tܾznbHUV^_Z[}wp٨npSݕm_L$Z9aHVhi}%Ds`u.bc)ݼ̱ƶ 9Ndr M43Ks/tX\=cg\`])2[LgϜM=hL&w[7_Ups;z34/9` j7@Z69]J`;?ZJݕi"L_#$(e=0 0 '7UF S$GD\XFDQ {o . L__H܎D8t|`5Eɹ4úAԠs`e# Rʏl7(%k4H\IQ)pTJK+Xr^oP eb֡Q<@*e,gWwa^G.%*! 9{f4URIuMe`UƱK57T1R +l{.%ŕt$+󓬤9d9Bͧl3f/v4{;hxO`4H|hGt<[.1/zaލ; B?G !,,V8D6H0{ZBCzd]sDS;޽\ݺs&%>(G"TltRFkj a4n\{`BUR2iV.cg.LDJG  e؎`緀(n"+ X>̜-κq|ofHnr9I^(bJc zJxO/;M'W6Id#)+`QĈ Y 5_ AKY@JJ M #^'U81R J3S 4x_W.:(b%^<& )܌$Ԏؐ$k*H5 $"Sr"\ !VPGpXPXW KdKNrԆ$wH͒$h$ 72'Sشrsl20 +0ݟNVrזJ! V73' [3ץ) \DDI(|8nā,(ºYRa39) u a0-/1AL @hFؿIB%1L q皃Qʚ`Uft}\rGu03=8MeW]!]0u6 g@d+,XNws=,|o92~zۘ-µ&շ_ _<=0yx tǏ77Xb3@̍ξ_g0CH.ڀ\eR>?=}&zJL~]~)2M\jdC7q}Lgwc?2+nW[y/Pa:H%? + GO)$~0JF<=00RY᭿]LX}}Dg3ƷA=C[nGxf|m3&l iw\(͢Mp&m+m J3kU=J^ѫsi猬o`W0f&!{}`ҵO'UJp{ɧW4NBE+1vM-ĝe]u> zEN0'.VV#1bR턜5`j ON Oj/h|2j"-uF-Zy6Ѥk(VG\ C ),JGK v*+uɖIz,!*2ĥ%IpOpaG 5gT;bݲv]dlU #ϮqS֪fDHnJ5Lz\h.qv]s:嚵bO+5&JkP rppC %m"|;z<%\ya(˞6/m]6#\6wxWָ/j֡QL×s>FS:nFVFMF(]nbfĨR%h_%L&|y؆~],O>@:?*! 
)%V:v#ˏM4nTټxMdNY{vHx`MXO*v"R8ҚoGZjCSʷtӚg,q{$zbO+SQzN(&}T')ה<h(gN$彧kp v/}\ϡN${:auXe(q|5SƝb!Q;famK&5; Yw,a \0ZhOn!BrUH9o7xA֝h*hGy5w&stѺ~7'@J8zs,֝wqj=%>G;&{b.wzJ֚H*6j@EpWԣ8D)yh0.q)SzU!g$: ^")ĥ1V#Z 0>KӪFsͫGA;vOɺ|\fC?-E`8r΃7 aL) 8 Jd'^`m ]T`A F^ ja'@ڀ\ݦ) D!yJBQfYI09o=Eq%2`؝˓ CҖ20r M54Wxw1MŽ_$ۏOvJwzc@'|5oɜ4࿧?W߿4_.lѭo "@;vrh^ h2-h|2Lb3@̍ξ_>s짇Gw)YOn~EE<;^J&vOr\jDnЀMl8p~R) [+?~-A(NGFHӜJJV+ H 93`[(-]<vRFqgv+mnģ254mW ^1V}GWޘ!+sΨYy1JT*hM< Kxxeӛ Kl)cdśv<(c;3wc;!+mh$o?^UȒ:żxj?Vnւ@ 8_quSZo"yeVN+<JAR8Ŵ1&&cꥮF#ĻgVq<%.IcB]G6~nQy‰e¸I9 (rLBD5B/*S sUVb">caK)\\ܛK{+ƞU]v:.*/Igu35;ۭIt0`x`2DRObXsy 4#coǍf"G_Nοvkcr%#\dqt̠e\4P}0;O;U0^.^ fևntIYBXTj*،Q%h m̨]ō ! z.udW{+2@,`ݪ*wj3`oȨC]b*w>f )p#|)rrKLʈIrVs%zb6E12mOf%9TU`.UI-EBPu_0Iq)vr7}"( 7 שwJJr8y@Š2>$ب 9 Ϝ,MEo|ۤ !1a g- A+|䜏mH']R;8DUmJ0 S*]ɒ6&cxT}a?N7k\kXsuRsL\^*Ƒ^K]{tA\hymeY!mbgl{־cGvҟٳ5ċHض/~VL[ፐJ6.&$XKvFuNT4!]Oh>w [#p'5? 6~ b څZ0]2FI^ɧ[>pVhyh G~X1 J>c=qX6(#)MY040<.%,8?dDœ6 *1kfOD23>i@L)6?Fhs`^ l oޟ> Nlq;4=kۏ0[M~.G`֠y7/OI}zٽofuOF쿮^<7o_:p4G[ffjfu~:|q󗿟AJ{؜|z?ݶ>nluZw/[X>hV';y=Ybׯ5c-@]u_r,BGuO=NܦYqAo>d*7}>5,%֝g{R0{2=xvKa;,v5IzE?P^ G Y-}iZdJ7[`jsq~cxؘ3:p%l~<K52)>!/rxn>O[<y.b$zF-;eNkÏ~ 05:ߝdk.yw2(<!n &껓1yFP:SXr}8 !?&Bl1_]{nv}ld/,"טԙVp+KGɒP hј=7Q;玻8Y\=0s%'kfmCkYY _ݎz3l঒Z5ninޝ4keڵfKdS ̞~##nQdFKܗ* ¦=yc&ȧmTPg'+@k7"D"$ I$*xDzkrC jw (|gN9^В2ִ֑p͑]&v%tN \SUrq˴O9y5^Ǯ=[[J{5&<~|-ӥo0هT|d> #k8mQ& 0.i<{uH,Z$4ƹVi'AxQOlIw*hّXUX-?1e^Ɠ|UZn'fO@ G I(bB'eT2(fLO1=E{So]y;;m.PN -RlAѽ }~3'U;ȧx =J;w rQI/ٙf[%8U3;w]-DǾYTNy/^?;*S/Z1qM߽I=["Mg$(b|cŰ>,Z!rЋ!wr<^#',v'ҩ )_vmE_'緯x~:߾.WB,:`JlL])6 9A%8/CJfRkGjp&.ƫLZՆ7Yw!}t;W_fQ(k6FArX>Wz) h+D,*ƓOJBrσ ,G7:^q/}\/(N?+4M &Z8FHZBAX:eB(m@ BWzUm`%G.S9Y,K1/._^ͫKfWd۹CD‘9,5ȀBPJ+ 04ca=֔(@+!0|as9c6/[kEJ\_U =XV[0,3V-+OJ2L` ڪlyIlyvıcIRƵq[DrX+x0z)CH`pzx*G RR#DJhDڀc'M%VUE AmǢG|tJ9"4A!KX@Za&04Y(pE96LS'q#!h찅aYƗ2.N@Υ N+ ;r8QI@BfVӌq%x9 X\휣KW3ξc[^rDZ>pd`LK!BrDCp@ 0 ; Rdr@ZDÊ$ + BԳT@H9YPQ!A2!Nx. B?`f"JN^hҩ@N[͍CF`O Dj1ut#)MLSIiJ;%8ERx<0@1aKT 0бF\x!HZ1$`'΂8 BVݔt`1H~@+З+%v4‚`*1x^â LSr|BTJhmp6Q;ˢtZ¤lP҉C0lܒ7/TÐ&`鹡Ʀ:X㕕a0,s1ld*NEXih5=@ + иAde0hOE0X9@rQ#,ӊQ VL"*mw똧$Yv ;Jcqݸt 7/&?:㨎P)$/&N8 +Ilʊ޶:1avB I7nQj8`8*?V1gQ~ZH]WFs~GZ>RPLr&SN`5Uۅ"a \԰9ɣV:wQZ ͭ) .9pa)ВIfo_XuÎ~JD4s\}nI>*THb GWkyE7aOf 5>)Ut"uws޿,x?3/*# IwTcUqpX B9[{dT?6 94_t9WKͻN0h^Na*EܹBgpl lM4$>iĮ[Bb滩Aj cz~VȲ{YGcA> AMzp#&yP ԙ'O"}îSKvw9ց55[ɴ֣gEhQ2/&=+x!^Ep"k-,ȚxdВ6QR=YjzffV`*bV hg(]uu|xasI#<˫ Zy~Ad _㥟z+}p_v)Wːg!$ wZ<ڎqC,W_ܵLxKXogYo(ϫ Aؚ(rT,G!wHDX,'#0&5q^x]d$ y5 ,%(e"b1TB)"ħt2˪ UJfYAJcRB*IȖI8%#5I=]ks6+әR%3v:۝ʹ=_v'FIO?dKDJHP6B/xWIg*Yi\kyŊ4YL ̣P^= tugz% shGRZY) DU7%41U!ƂFpk;_d{BCLFV3&$Ji IDh IÄ]N,5*`RT%@خ' b% Ȅ qGMO 0q,qD0Nx1evɦ?JZЃl cvv *oX͌a6BcւKs2n݃Ӎ}H_m›:BL Ns%m0W,jI1KcC%,,x,,5$!ZZj8,4uPk'Y&2֬i5,=`1UNTBiɒ+ە`L=ةsVq^Fg#-Xg_NJS*0YqAuX; d8Ale8QT^ Ea` LO\9~T+p sc-"JVl=8wMza].C \l|}"e1 ,fV~ګ}ԽԳԳžb4T ,۫_Aٜ(>gOm@~%øc$pJڻua :L̏مɛ]O X*O*W@|& jkA׀ฺ uv 4:UZ8CFpm$ۿ@5DZΘ)1$D4vv3McLB\dȵF]:Yl u[YEۅh/DܻF1 e3I$\%&vӄ ٩2k(fqrUCj}ܝ¡??,-k xÝv:ܡS=䈑){W>My;ib5X~nwwW?Kl}^.޾y#?~+~9\hiFW3|aOd9WJ)͢6lֹw]@US\[]=pZclfA ENH" J$w~tq?L<1s5pNtsl!+aiapJ>I U&}jmt7`~\?wVxZmPy 얟~I]5Ii2#!Di^QևJH@W627>\תb5~8Niua-Tniˆڃc'∣x`l4rT58UNݴ$O+: u>pnO3QFZ񍰛rck2\ܤD1nd:/uƍWT>n*A]5MlL\,<[t̒$`4MlJ"ؾԷ"}s\WՐ e}8wNcdkaߑt3 ;p}S܅߶)b`µ V`0_lNN]ƿh݋7.|r\ԓ2-5dHАr֒ $7V޺zn&$ZB۸4(.z/ԋsRSE ;#l !i΢TfqbL#$0J;*C1/yLagW GɎ =. Y-T$y4gbS"JQ~c6+x!waa)Q`F5Df1t?وJ/Լ.VBFQHh9 n~;'lTH,T 1O:ikz &"+ B0إP.絭}(mu9‘FL ihJJ\ʨ -9f|PLIˈ]΄-P;Ǽ`APۤ_<6܃uLF`Y!ۣײ\-\$>{vvHǑ<< s Ze$g(W,rK`>Nzo3xs fbpU{k?اO1B [2ĩPEt:5 Qz4͹ht3F7waW?L r5kl%aJ$oA JwC$l |1 7JܑOE!'k4#pm|]+oIDHK$"l)bI nw|m}-6WǶ~ /):] x%N!:Ngm&}\O1Umhx>-Edy\@mi t C;wI[C{9 w&[9Hvzf)/R[x.(gt^ J9GqMm x (W꺔ůp.W{&4 `~; ;Ს0Yzo[lSI :ʣXevHHW‚^ _|7@1!}(͹W#suճ+[2PG%0b! 
xZ$ )r]P%\,`]{Quc*K7bMhd $YrD<% C8. (yFgdk6/4ɁpYUSi=*jqPkq {%8X@I30Ti`B/zQ BUC.X۝gO+-~b@/n \aOK=q($o$m$)T),0AݎmG%RKhI!tyF[.fr=N54zfz${ uZusT%9@KWăPMkxEicb?A_g7%d$1I$QJ$UH*&꨾A-uA!}~Gx5?ܟ/^<Ðcx@EdXe8]K4eE3ŘdQNI)Z 릟u'˅/ Lb$3އί^]|ĊL\k8Ja] fKD;Y1v/ܥf`~3+~78Q&xqz=m)@n?L齽Ќ<}YtmT q5Ͳ@iI||߽ll?_}Kn|ށ ihB8OD kk@"Su@4K2mkғзmnkh2sbjZ/D2u%zTcu:M}y(U- dhYYPETJ\%tsU}E'K=$,MǙ`'ki*|mAHnƂŷs_\%!a<-PAP [N>T&}5ٗPHԛ,#F|M6 #u[ @](YAf,4U9uGWFRɗϟk(B%W͆{/9@yzU7MbВ(Qq1X$q*"b7X*@oCqmh.cjKIA(eUN#P-zYF!ַ|&dS#禅4~\vKԃ^/mizƊz(E[2AGU+@`a8`D'øP(m?⃊| 2܃)L]u_a(Ec7wES7"̈h:"u͜ÃM' Kp=>!tа/[{E&)z%a&$ؼ. rd4P\ ݶP=>L P. \IL-~[,-Q|{\,UwE," sd&n7MGzi>?|F+= >^L'&|bfv,- 4gRşX' =)`H1tF ?{̱ܶl/㦙֞mӓ%W$>wA%K6dUӉE$. bc'YbR861;IDk0ˬYs?sB[~ޥ]3vQVçO   #Paڠk8l ۶Ptnta%+g>i*œG!^YhBo(~~ w/}"K'",&"/zmS -WG`0cu)X8P%FclEz}gCʯb#c@]ne$A(HbX%c5B f(lu-q V4#$Q"s$vR; 'D  &E7 ~ 0ĬyZ`Ƅ#n݂,! .(^㜄!,y,kXS[Sj^ҍ Yg/.E¡ul9\YwZX:æ6N*c@z"D3*IE$#*$8)AƂUIP8mJbtM< |^^ߗ3+mв˙/pF%XYJz*HIdN9jPL{ȨqwzT8R\NU4zVIU$;kD뚻4O<ةŽ@c]A͌j 䱝S!AjehG•!uKu{ޞ(CLR~ BR fA\ 4@.PDLy2%\ZfP#2Pϩ/2J!.e59(Jv{"I1c-V %hrP@&L4Z&&'ljuӑX'kg*LC>2)^kޥč][<On/!%CK.G yXPlO4\N_ 01hAVQF B`/ >o \r](]fb1;3QrCcwkڑu- #ث}Pڄ!g[|Ѩ+" Oï1Rl4~LQ|fbX!bSQػ7c䣂 ag0D0nM0Vd{^p̪ gH$id-څRjb&d&ƽo&w 7!(}UX -}J@ݦ#}ͰjuNw=E"TL% ' T| 0 H'?KVʂNVe_ʖ@!-O뱕btcUI8([yet>1D "Y>N,!{_ D %VR-1YnR#(J&RRi呓jV# C>LZwuX+`?6~Nn4LOYp1'o_ 52W0褉lv0P.~҄|>wx^^S!bqhE\Z~!:b\&*8r9l(1<jLMVj߇ASL[Lf01lB_aҕi`mWX2_=]JɴwR0627l`L+-Γ&lqZ\V X8?x#O81sutqMap.ԅ/[yzV_r?OZEZtOnVggf-uD& S'@ElhaFꀗY1'K!0=QukwbTq:4&9fepX4v<$q}Jy7IQgJpRI8@,fâCBkU5 K)6^H *{3?ƹ{V<.l>1=SX2S1r0D)T,(]Œf3I$$2ȲL1):V-fIt( @F7g(_q0vf[KD4t;YAǑ,NbX1EaL"b5\3gbKLe2Que%99dns3.d|}R (#`Fec c (N×9q\8Ę#RF9=̓Pjr{{b艹V,,s($X:6ދ K J"D 1G:s@”pm M?GXKTٺlDSP}^$|A]E9^_'C4y]PGmWo aT^X`]0ukaebY]fRKa?{b1i6_uݞ!{!^4<W jO*f >K3?5G!T'0" w (,,R9U| _# 0kg|260`xߺYǜ5Wv<")fW싃&e6m5|ݺ kyy'x5w]JQAj n8X8P_DZKx73A@Rɺ\|T̺@ iZ??b 5q'L0R#'LrrVEX4-V!"40_!X/ΐ JVY5pd#Qha(WZC+GUd.5UF+P TVe R+k+ .jE0 \Jl53[ tyR83~ V1g=36n3(h*kr[oq*j̼}PaF|P"QW#-jr3>( k7~m\ژ7nJ.7<, mpyJ`rl6 LFtU ЛR8^"Z]L0S./>>LBYt.F L<Ѽާ,?F+ ߻jOQ.o0_'yBfxq쓐Nz7(9X zOERk?w"ao'6 R2R8dR1su§K\ R+`^W2kDvd?^[1"VLN?n;LQ| $zn3 %Mbwnkmn[i0GO֟ͤq8󓕲{Nt|VN^o|oq}¿GǧG?gzˍh eډjs6-X6]p;I;qmqv-ʟBAmۜs*,;'nϨ'?] .竱_g|0Y&7fYos̍:jF| zڣ{O@gdM.'鵧$oQh^}3NhZ1:: N0ND0Л~ot]hI&Շkz_E2㯡W>W~!O__\1C^6xe6<w|v4}[_9 ;x?3b\.k8^n3u6ǹ]81I3RH܏?{WqP}3$Oy2sthsI䮬<O !9Cp 9U]]U]U-Mb0k)6`|&ZU0o6V`fa1`1/ ?|Oyl.zdBw.7M?,/%YJHwM^gv+51֨%WCN<樔Tρ,mq[DiJxMH|u{{}bD$Bߛ=Dcf;2r- (nS61_r7#<9ɽu<}ݧ4|>;(`LdT<7zƊ.fZAcV-rOfS CXwSe.>OΡ(eKWdKoG&^&P^k7B.ƒi&ۋD.l(O`uHVաn5 o b{|#z+yQɹniS8o% vfšџlhӸl2[^-=f۫cbXpbcAScDETkÓep*D}Vg8Qԋkvɉz2k ;nN>H^)y2-unr7o֔|u!uǑj^; 8bU!|5~$?/؞L8T&<'pbE[y݇[#[9Q ;W=C9ӴY Y4uy*lU5 F/["tp?H. ]:}=zIp4 |ʹyBYmsI ^!=P2xU㫯u՚JPi-ѵWvF_$Q06ռqJ+(ޟ^=lVn׷o?ZwBӏ?/R5ar1w]5G fa-+_<ٽIGSʰoxl4,m2 )46Hx{UGX9W5dwk2Q6+L/=Ǟ>v˾ Sy%: @јL8Mbj %:_l%#+ᇡ}32"Wܖp5? Smiڵf~o# :\|As^'Շ{"|$a u\D4&H4O͏ bdKUoSWCܺ}u*G LiK(!ƑN%a(LP&J$!*#f9͞+Z7܏voB?nYv/dy"&ӫnGQ(DŽ[r:D1U#+D2f RJY*CfQB{]9P.6rhyA+BmEIab!bMBD1ŒČf#X3"EDA!|BKkti(a=m~~8EQƁaeCh9Qb-Y'.1 4-x(L@I"X$8 #Pc@8N$N\t'fW"c?fXöx8b2p3bD F܂1DJ-) SI"p1)CXNft "'tEq!ܠ:sjN`ZFV& Nv }nM ֐L. 
gL{X)qRQKD3,N?FChK"#5|XYB"` ZQFc*<$1s&w^`7WO,@!1ؿ؈$14XKK\$\laI0ᛦXZzqNAp߆Y8S!ݻtiڂ*+ g&;q</^6[]:H3/4)h kvYΕow.*r9m֙=KAX^K \N$0!ח!onWG340'O{hgRż{e~z~?K1\,-Jpxd F./o}&0G_C PSV$?t))UPL%QWpKΏ =r@HQAw't@c V"Td-5Q*K<@8` A慲fRT%c"kT*peH_۷\ޤJo];f_zDp봿 &8L'R$o\G * EH"C򶂷衒0ҧ˛D\"r-q[dOvt)rɋ^ ©B 9RQ~6@זvF6J*-% \ԏJ!&UO%KP.hABsnXz 1E5ucMUS+Ʈ,*퉝%G)t*9c)#-{!sUuԜWf^+A ]x!m RO(X==vYp,4fC`L+$O_U#SXGI?؆H(ã >QFf/{tIܐ( Ӕ(F"DP6Ilc a 㡌5v0:i*y>=#AUFv8 9|s]OXGьcҮAv H6m|B\~׫DwrDlxHk"UzB\ m|>N~C>r]^ Sazyr V+i߾KpKqb|Ԩ`6ŷ d9܋fM.hES~v޶k/?ў];g\fd@ -]_~Oҩ%dJpv;:.n;ߐ H}\YSgٰlgl z?ǻοz{7zC/{$FRvǭjb-\%.Ln NX:!QʨOՋ1)h8KّUsCahEЅ;g,,s- #Z>MKU|u ,&Gר4Q*עBbk(DΤ͸#!3.538y/q]$Y$cvdWS5"],eVϬ:g.m\sm^18"w&F&\wcEۏ^euɴM81S8LSTbZ?Eu2)dwzGnv5],\ !;ԩEQ %b 0yyJ`(]zmX"g~*sMN#r>F[`\YVd Gqu\`4sT͔، @H4P]kс &hbFw7&mꆇZhawT}hHw"Z"61FR-t"#.$te`—J8HKI&dV0vzifbadT7>*]aղi^SEzKk*AUsɖ1S[ ] vEIy\DDY,,/P!b&41vg؈Œ6"&2g]a˙$V7S$!Ja"Ayd04"J'D+(&0q>GW7.S [*oC/T:q;pɥE^X!A,ס]"jX pp32Oq? +8@;Nk-l8]yW6dW/3\ež W6HUI:ߜTjئ|o>|Li=c0ҕ[K߇E8r-3Mi_qsC{01:n":~Y|x?vfFqRxɄ.~Ow.3ZIF=#p.#~KaD]uFV^akClZzph|)7Vov>xnlRfTc 58ʗ-g˟ʝrV9rW{e{i;3cG6gA*/#5GwhҜ랝Mz9~lfS\fN$n2ruȖMWOᰱ~j-joq2|Iێp1*o]ytC5ƶ5T vRF)w.뜃;?j/g/̼YG˜]~qEZ LR*mE!J4Kp,Mh&!\"+Q$L'ZM(pAC„cMlfCa?WlJiCmDX(#wh,hȈ0$E.]ivEG -lU'Wz1) !IX֦OOn_/KI]JRծ"Tp^e;TŏSJwNjJC XICLhkBls͌8vbN87 йblGT[fɚhvFR~ ZKsp0PJZ>WJCL!qu;ͅRBvTVX0ծ1 z{)ż=5čj%enP>Ň7%3mD>爮k"XhMIYm7tr7t?N[Au=ls:#a!̆*&ޕqe_dV}wr y6"KbKJc.wgPlH{ U]]]ýK_=/\䇤Vq;}5MR5_)Ƽy+L1^|Ӷ=t{6^댏MXޗ"=/F$0$=]DZΩ{'mtn[tfyZ#BJiUo:{m2x )=H_n-:xDC,ԬK;"7 >]͐6bKdn;TNy^;ECBc4VK;EABb1&{ N3_KFEc ьciJl}si2)Sw߮ K]H("4,;A/ë+qW*ë7zXp*c)iQM MXo|ژ<(-O>?ئ缡䵐ՕC}yïqxnХgבI]PR&X*6loo2{n2qvu>soBƳk29́hjEjQ]-w!`*%tPCϕ KB͢嘺"P*!ue <7 8M^G%/)_J"\h dž0t53S+ ^N/J]$2Myty&[AX_OR܄I3?Le9=o9e Q݄NCw?{wx|%oXVY*)9J%%G(UKJ>)L%%X_ #'3G% 3O<ȳ|MMm>@Z "է?3@6\Θb{-q$.ҡ: U0-QNӸ@^7.2c|nFj!SHR)QJQJU7R<*,"+#^ӒsoqVFjkR ``237RMjt#tr,/.P"֔0[kFU()QR@GUZ]{ %фb Uj9R΂Ơ$6{5k:2r{|&9pG^ɈVθȔ/ȵ.c`3#,` .ՁOiz |@8[y _NlԗQ2*gR[Jˊ{jp"Q@As\Skc : HOBYg㞋*y'Up[UVk0ܸ<t_U`/C[ކϦ^ 8|Ey|yT`a {-@h^/;o>o5mX65E DSQݾu&ek0+dG;_;=>}?ώYt ۏOdS41hLco;~7A~#3ТzG9Jx|}}uu~`!W?s]'?<=ooD*ڐVtg鳛?R ggV?f U/BI i h&_%D|J.?dJG!!|Ǻֻ|J[-twt´[.t#PޙWSphrSLdsL:#;Q=w|u6{1yh ѥQJr8fӬI Dk¸Ut\Jd`ZnAjBZ,Zd )>/#QE{ M7YY_T "rςL"00TܨwJʼn.ɂ>;B\/o(#5I0 HW (eHBQX9СU>BO A9#-䈄ai9:P:$UWO 5增'pfhfR|F(fɏżGCI~f}J=&|na$UF!R+NULs-&ըQX3OrQ-rji-ȵpqnr^Aci]@ҜP7UR&z1,cY2VJY. DY+ӽ3s5me(L BI#PZI`OJ`BItX45Q,_fۣSLn=ɷWw3pLb-nwu\'bb R(pS]aWI (Y (bX s#`KW#Ֆ^XJ%Rn` jϤPr-12Mนe48TTѲ!-mZ˄Ƴk4vtHgƤJOt1]D]VWB%(֯Z]U)Z2.K^KgR%c:RQ*0>rYR}@2)AeIʵ!,v*r@h˵rhn@^T 流ƥ\X?6MRvӞuG\wuG\wT=׭vLZj*z)% +?/DUh@V-tFӚ >?_P2Jԛl,b8eIkBf=@Rq[Hac qLaLp6%l-Oc jQ)8 ]#+pX3ơ(T%*`r5[7kz,꡸-;E/?t5#Y/g]2՗6؛ rx!F,O5 %֌Csq"qSUuݦ5>Y3"Xcpyc +>ț%_;%۾t~YpnצffjgFbYؽ NP'{}D__tg?\LGtZrP+OuHNb68d/WSMRHA4(ė9{wxӒ+Bht[CE !JF17a#H#`[-+:o@%x#eۉL-xgͽ}Җxv(i%X4x9yhk~zlKeZk-55>&yfiݽ"ZuסҪnG۱kGv S+YBNǡ:sM?$% )!o#<>l0[{vЊp^"n {q#[g`g*Q *;#\"{S*$mlw)R˜I;L1r~6rSnX1r'sD- cF:x5*( ɗ(,ؒD5fk]+ORȡ 9W 'OoFJk==8&˸@1rU#zh i {0;>=w{Ë́qqԆjmr"4V^ @DBs; a&vCׂ/(܉1R]|1C ܵZ6Tvݩw~yo3́g,|IYܕBknA4nWj _յzYc~8ˣZ.ۏ߿W{>]b9#clopͣ{=|5BG^d?R#H1ykݳ-ym'(WCLVZB2PlVF/`"DAcRRE<vH{L-)SRht@AY'O(O1)ǟpV)*Ŏ1vZ BSu:`"S*3tA&p$2X%N JP;ر +Fhx}⧙N5l]NmD1ʽ٥uZȎcsJ.ϏdzxvsgAgaOsu ].V/GrNhaoן.Η%~}{?k@Q ~L.+=ѐ??Ws꧳N&ӓpOgg|}%pm?{Uc,G?'?4^j%-DGC30H찮O&UJTKc6GjWuOZ1>ωٟ#_w$m?ԅ ?tT~!W^T tQݝ OF\WxZ!^L>2{)o%(AI+ n&v('_鼓B<ju}=}7׎͠^ ~oǶbAߍf{f'y'$uŘ]Jl)<<IJٜSYpϭoĭ'Q !XMF'e@V2'%8ȡF^Q6IDrOЋX|BN\vZ2!,X- ) z ƚIX0Y(C2;2C*]**Cі˳TŰIuI eoE2l0O6OK6ƨR{eat|-h$+B_E{ґ89(,_:= ؑVkdjD{J$eHs_˻7a_Q_Zkf,c ktrUо=3w /&h CކVH|־#]:~>q0(6̀Cܒ LлR0z?㐟h O2/v5Kr)xݐt=XFޫ6˳Og'ԟ1R.tvk@'E;گǭQs{IZٮ?6GWڑ 6%fceʱ!8id@C ڊhvKDoi$H[ !4vݔL_}I 帮eb)gwHZNKnl⹪]É LT/רFQDQbz !cJʢ! 
%o(x[T]VJ/AH'2tQ ]o~W>a'iznPxS T|ɊR)(1$i A9"dU^ Mz LC٘'gVX5Lq-FKXFFjY#*r* hm+ȏ&k&G\`vB5jc!2e1Xm ىC"˦/d $=l`HRQ+AD ,* 44^wBIdS9"$RBVYY2Fι62|"DzJ L 9ewz aLkE4#ڢT&Ơ?-5.Ԭ@,H/ "*$#G2KNrjmzgSZ=!]7ywӄ]]s#~DK!tI+Ai|EnwgZyaxO' ( j5 W-*%\Z=} HC^'>LV=Cŭ+f[iU jQZϛ⣤KGUYӹ($)k169?矆5up4,79Knt\ѿ.EUEm[8O/M{Ͽl/"-QǤ<8|S3d6)O#?JɻŔǬ1׽>4RAp 1Xer!$ `2nc'| լ"ϵʓJyBb#Ъvl&I5[<9^;y䡍g#Z@lvsM%:G`P{Hr$QcPwBoF{9T]Q"T S,z g3voMB}Ar?حќl+Cw{*h8"_{0^LF^}'\f @.gdV.qpT!Kl`quަN9~?ƑnA?>Nf9!NF |(Ѕ-r Qc%01rp@l2xoLFm%e]1jbM*1^7>N=*+P vkӇ빱mlALc-3˒iVĆzf, rU»rQ$MaK`Kc"kݺ5 >ʵWiZY]Q AZ!y'-u![!yʰCņ ZH ]%$/cIwm%*`2IJ/y #x%勓%Psk4D"&Y->LEm2R hvhf$^ܮ C%KYKBQKW4WZp^YAZ֍YZNSrr # hO6$Tc-;9Cyv>d\)vo$AE{UgUNc"^_m*nbWx{C7߿iw_&t9L./nG7Yor#`I0FT+*$"=ȍ,-&*8o8xE C@J.OҺ],ItfB}Ѱq,jqxJ?U`nJ<t+ǭ56*ɻ익O?"<<-^<%<eX 3h+zOrkgcl:׸:3O'*'b7tLvZrءΠdEQ⒫GX٬o [X Htms fyC >9R}o )]ֳ)s$\DU [uYiW9X ;J.5ŪMn]VKͳ NXVJw'jj=U?tKlF=I;aJ=~ }I))7\IX;Fv>,Gim}KyiAC׋RqYѯ\n^n5o'Or:>թ"yI.)'h4i| 硱-|EkY{PlimL:kBEt""pmDNz*8rE`7$2G/YءOmN`> ޱ¤)_) d8?l"-_Zy2$'~n4GѰuFT.Hg7߰y¸d>C.T{+Nfƨa9Bᖲ=_,,m bm<;R8>6gkXӪˀ.Z9рCwn`7Aze܇ơn\)|ZB`r.hR hֹhmOhVV[L)]w'֊_}\\߿ׇZsH,lNgXHR7޵%Ph s.zc= 4OF˩d-AA7+ [^ٻ< o?Σd6ތO@6+T!TxH-[ !3bL>vijB;Uj'GYWyuHV(qrV z Dc| ԥ'y N3_δJKN<E CI"ٵgqJѵ-;_v`Q8:4 h'*B5ԩ=p(JQڧ6rڍ ksm/|^M'w$ \VǩC*ntWtiL78]rچ=<ALո'%0/GTǏ 'aT9JAKa?}dy鷒6)sĨ9*#捃1pl90L*^`t BR)E\Z8(ɈN@mKkҨTKgXm,lTvZ /dK Z#<1AD`$ 3$ KfiP(D++ND&ՂrQX~^'!P2O+0ۼp,,vWP[ EFCcS;[s^M ?~8Lb(RJiFApbAZ,W(U6M8S&LH0V2.@Bg?OKПWt,D;Ui}1@g``d]rh~k7ڃoȇ [5 LH:)bD@)TYL8bXf5NRBOHʭ ZO{V]ɪnm0׹>[ A{;_+h2]W #_#3d~s2 З^y\}Qg7=}l:pzƬYJ0z5`d2P%s46h*S2XQ$aRi:FDB p1N^  Ԇ XgE593;I2aaR,2\…~ l ,Y^lNTz;^jƜ pSp ox 33$\K?=Ưuzb<Ǖ?D.8oIt3M` 0p ,f`k)C4v6{fCa \]^Zf4Tpq $cƓD+R)45 l6( "PTo0k(5ɈHK-zMi#4McgNaDҤQI"ư#DRǔrE9?*݋MF/u'@ o7 vb3.R{?) HQsʁufvbGc'8:iM"p.) <9B ٸdUHgVX$U!Ūݕ2ފ#ZUuKD{W%wvg1jO7ع;Nf߸!yAEeqdQ(2#Ҕ5fh"v "Ը,<p(HUIT^:sp(ywU+}V;d4R*Gӆ V1C wDz WOaN'9Ѷs"k. E ΅p""X/HXxT.v ' Xn9fyT?]%ǻV1Ե^ /0^ȶ:{NPPЬX z鶜>U92x>7սOY%>+d_ܱɰfhSEl\*bfI ҐRa֘-/ w_tDKN,0e/m+{w?giyg|Eb4oA\(8\ߢ+8p 4k7A i"8Ǻo/^Vy DW&˯Rٷ/`KQ{B] :e 4<˲$M;^nN%qW%R;GH$jJlq^8(.1ke{>HЦ.z!/t$Y.B;MN  uZ~?o/T)L}X.֭_ܮJw/qrz?ց˿k\q+}pu1bfX{:Ei"u&X&Ncb2( _yH$J,Ve i*z?l;jQ݁"_a1bp/@%f]rh~k RD0=bTT$M?&t^~ɅD[S k8TJ4q'NK%"k}1@6r+M1h~ ղN47cKr%,[[_=>pE hsH'ܐn@n_ʎ (5^T,'kSRIzau$aL^>7E3gʬɘRPj1; $C}ezϠ[v"Yhj uԹRca 4B`I 'H7d AQ:eM0;T65ćSes5֡ E Z|5 bϪleb:MmLc.I*HYb&Ɨ\q LZhqFZn_6]_bq- 7A7e =Z_a`#)2RTS"4ldNh8M5QDD_-D4KRCx-2 |IY^'F/ikY妞ߖ>/ A|:΁u_.~ (AX0w_%BcMr+-Ⓕ{1jr~l|hg]8RGwoFTF:JfoW{pYwKXJbo L5` ~1C_~^Kf[iaaCx+twv)LjRTݿ=vt띎wU#fl6!vL\2}fH{ΰ_G ׇ"j@^ͽ{btN @.aϻG١0fn ~cq }C1w+l2}7}{W_`N}&R  k x9jAqJx(wd5C LfO=]ʩM;q1GY@Y؄Xݭ;n2f91Q_rsٳ(`QnԘJ| I+;"q4dѧ>;yhd~ Rjcrcn⁴թ+Z"&@]bkF۳@n\*Ԥx$((*+q-#3'}`p\7Wΐ&:ɔru..sh夏`V8RH␡[2GH=^8#p\ Y:}V(ܚLG dʦc0-b`3(2IOi6"TAK""럏 +mH /Y,2R߇ }q {{z$)Pr ^K8HQ:(!#I`"ıFADN3JULZ&4!h}Jr[ƱA'?0ݩT;eyホI%D9|x/NM |ıt#DRe"IHͬ[Hq sMh%,Y j F5QT eC5`aky_(J sx=4 ̃\Nٳ^UbǛnzABQU'ZrOq4Ej j p\P6!h>\Bezɴu|=lP8JBDH- ((,c.JBH%DMHXAV&B\UTPU /{9yeg)t.s9yb.FI'Rbl`F+ڽZܟ}w8T{sF Bqzhӳ\B|Yo"JwY6W D |,&Y>D`W͸ Es׏~2iڹyvWsl#C:4CƁ[/5|%;T k˪|ؤ$< uRx1㐷?ዏY[+G ѹќe{hs{.<Do2s>l|ũCK.QC6F#.繰|ԞȜ{uR{@ιؓ9?\ Qw3koOfy\]v4M)^pk95lK0o7Ll^_JQ)6ŬhbRt,$N`Ǹg,cFg"ȋE3PFH)PQŸ)f⧰{/N ]w+Y qb;N V!F  q[)1͝1qv"Xd 7aJ5#K*%ԂgUCoG<ڄ$*/X8xUc"R# `pܞER3㥸BXm˭_c&1StOgŹ=v_X~'k/x;jx$_G / 7H5]fz(t#M[!Mp꿋0o8pv}U^d֋ԅ\A ~m|VϷEv^o߬q)%S >XLJ#X:%BDz͈kFH-.9K]{;I;zL#DWAV5z9NH;V/ ]s@C:O^9 hzaNs]vҫqHئXa4>$R_8FXbo&l=j^Aoz+_Porso>} ߽Zbo/7n+:ٳChtkB)ɫvÇ_> X<50W\WԋӒl]0 /.(n)mUeÌ'JϟZ{qcp]oM7ht`\sgM/a4ՓsOCS?}&Aj4|?k֨2ަ8~rhMuy ?UVy@Dª/êtq^ϋy?/؟7W]=! 
-ɱDr G05E77S=Ckz?wF~i'vY>|qu)Sϵ ,S$PD^+S)(VGVeu-;+끁W6NLE"fIa:p]Rc:h-7U<&cN28[Ev$DJ C #b`%(5t/Z,yLPA8t̙l*~ (HʳsߍO)@јPDkO} SŒHTQ7x@RrN񵻯xHSe};+*;W.4,OxxS 9a{3R,:'֫;΀o u.Ѓiֱhldd<*5sT#iRnWTN?R%9J.fͻL>/Sr)t8Bvtu4 f!9(8Ё||c5{iF>Kmg)A5I[t(rS/+v_V Qeû?ŕJ!7!ߦCA޼p|HSڟvwWOlb۸Eݝ1u;%8BۿF{hv[pͩnzݱIapE)ߓBah=r,X>Yu;TۦbJ ~ӯcІh;VMg䬚bWjR4yX:OoQ9c6ߟv܌LhY+O?b)ѸM6_`촁ݺ:֍Cv* b77Zm+&[0ÉVMnj11r)0z\\O8"//X0fUDN4^L&YcQI::BCG)PJi +ĹGo%Io%ں0" '0a fhDD:tPB¥R3R,1F#M*յյil l#8!́+uo'}3'BބTA?g?-;*5ԿgN^}Pɻ+ #FU}rnZR092cjqۼtޗeo^T7j'HZ*\䍕uKAYurr AneUdYzvX-#8fHO̻*6 e%P(@k'Xs*ZaS$#u0ǖy"ppir֝}FuS B\"լۼƂY6!1GX%*bLHMKn),/?p,,jm8Qru Ű%znBfӧM?s^<59/@qd4ˆPARtJnͭ4\+{ce ;Wj*@^ b9$v{nP´*&vӚ)U k3 lDk,F`L2GĂS14UbZXzf<` @5U B=Al-^?)̓b؍ ;x@W&ۛTeEw̗+{'+Hi ֙bXkqLNMN!:$g^Ώr,rHw.d*or.8GRUĠT|G)?]nUZX !߹V)E<.UGݪbPDtv#OW$Q1*vCBsYBKMd~}sS&Q\bP{~BUthAu_#YKX>7E+WA] ?d fI]!۩; ְa4S?$Sa Kr br9J,[#Kv*K3j`Iɞ,Ub²"K>[+5\3%F\oxsr59Nٝt%ԦԎ]ZeM-IjKfڬ| NŰ߅[^ϖd~Jc;gFKxl}Y%L,Tp1DçIaP4f;vAiFHcM>}r5dԓgߚZ[#3ʖ,ċa &&UVTWl*kֱ&NP_o8y& ue%Q;˔( 8mFQt.{=Z˨KW9s8Wn+g4Ml oA^?Ꮇ uh[JlҖ@XӚ-{RƎYK]6Ⱥ;f%e'N`yݽV[q3 QY{oPll_ oݐtԺ>)3x용' a}v 9IJ* oB燣hJf RZL>KtU"Fy? $@+܈Quv7 Su#~ވܙBpR_A~D2yLO3湷̉C"&4id3iA| &q fo)v=oVϯura' F> F4 gZ';V}giN/rIIer!I^י( -z!G4u`~6,~JS2g$C~rJ\غNSΊg \fK6V.hc[#jlSn\2s*N%BP*϶ZyL6p<11Y8 dcĜ'1Xqjቕ(qHXP]nIA/ŠFfgvlbF^-ukݽ(K$b]Xeݰ,Tu"2DVdqlfo#!NjǢ\)[e`]e*T>aEϞ;U/ufFMƲ0^_G"9YpdJkR*e9 )ˣ[cӏ}m ٯM7˷j@LX!p݈g ݈WZ浬[>̺8R7ШTH#@ $A~}A=F%Ii(2jbH-qY 16o;lk p-om]&@&Ї=7^vl?mk.?|b~BSё)iEjjzzByjyռuGQaʌqwOk3ք8.=:Jo=wLynq_F  1I/~=\LZA' #ͱ)bǥq?.f-y<ݟ=_-] ¬q{4_],2[<<ٴn>^z;xF*qÝ"t߷xCĵ4$-i,gB0jHONix+[Ci2c$ie&(Ȅh$( r >^Vg/}&fǫ_i۲;~?*Z)lkzlڗ=66ëgL9uںT>'˓QK%#|l ,嬄l#%'jc^^i b+TL MqU08OTТVJ/@0Xdu7Г Ʌ+-H^wp"^ײ 4g^ ur~4.҈F11 &2!6РD"򨸎DBtzUIP1"g,rscBPdk\0"N畖s&Dv#&) UZZl0K} X OY{E$UM'x$1 Ib,U&g!Sf )st_uXv\lDII~^n,#gi` ???{q-?[|@ʏ1àrnoߟ3  fDLJs$}zv9~Jg[_k8, }KɆc&J3n" Z6W;W?*8n6- Ǭgpv;AB>AL@Z6LF 56gܓ \?[NT[Ђ DU%)T|;ػ> LS}6š[l*_5B u,6:Ф38i]c>:[k\{/zg~kFT0:LJ[{NX8e=?hArEWVHt ,D 0B85c1^G33MTb dt1hɕlZ#KN;_V^Ջ}P>_j:z_V!*K){+DMC#'z /߀[*|luߘYor89V`7b<yL4-!9O/?əJМm|;nt]nrEP#} ʌ[zGKm6PI%J1(}3xXpcpTRHmsFDGAO/f֥穚@ 8gS;w T8BR&"+lŔCU:eJS9[RG˷XzXzUOɤdD$xt1rbP&s r퓈DFT}TN?e;绳6J=b4tr1_y6AWgzML ̷ŖcgsE")&qxbݟT7˺6AU>f"]mG4.#- ޗ[SЃ5;w#t~pr\ s_XӺ}X_="f==-|V #2k )]j(f]\WCqm]_(@yiU?܅gW_^=*U&5zϯ.\[dإ%]PJ5&kޞ?\Y!`1Qma6`Bl_o߸ϧ=j+8gX?88k^ٱDo\z%zC')ʡWGUCkK1. `~7'H!0=n}4DAg/WPn799z㖹y4UƙIWf9/Y 8k0~ԅ`L^̽x%TOl{n߉gj|/dOK)=vk!:r'5XJI/N̰ptb h%RX%=~Z +ZV;E7ې7LWH Z6Z\o2Cueaſ'X]xfJ}ЗKˬ؇?<.[׫&S8sA LZ͙@9OR b W4)1X$sZA1JʤWnT6c$3F.peQM3 t Bİ@3΄hҊi|&II5Ox Z@w~/*9)75S%[SjfYUy$*Qu]ʻ/h#QJֳVeC&!q1gPK9@Ldg *g+Nj~>bҷ?y1&(gZ:FAս1>YfݻK.Vk@pz`mJٔx!j!rù(0҇Ηţ&" ڑ vVE’2&ps^8Y jJqs %bΰ0:ţ{ɂhSXD)%a(e|YMF|P~m=Taކem`5Dm\Ae= (!6J% &0eKpvSI^_yE/TnL^&Yi ƅpI寴l.T6 ᑛ=>Bm_8y0v,6ݱr6;w<^E{=iKȢ>V3|q7ws@i}]hUJTNEm"&#"?A"a[yR; $69R5I@DPʖ8Hy #'61`O9&K6XBnPȉQJd1 B8'ħ‹2tWn=p+D!Hݵfzdz6Kz\]zIS.*81hF6a8<|L)Tո`'a2)cLJ=HL~'5/—L8_oۘuMgVu9(p˻=nZnet GO@,gQ1)p*By:*9#lrP-ݙ$w[Yc 5;cr_Kx7ς>݇˷D/\991U#Y5[=\J^t}0Z)}$}B@@<۽nQپ y8t16Ӑl/ 5ޱMHoo+Bmpb͔LJ' PB!AZ/ B+c 9Lt/2!w3VdTdx\5ۯW^? ~ ru)І]w>^_W?} j癛0v9\pOe" ݁ /_}:-DWIT/ſtHdP 5c8V(gz;x>)q;EHZ06kQ&% ĺOjSVa'GJ;jheוImX~*(605T 6" ZVcÏkʴ9F:- h f9˔nV+!n ^7GFTt4\ ~☐iA}͢ ?5kI@j=!]0@HGNn?KZI;gd s:I_8JX!ՅA6ig tIl>g<|xJT(FNW qyO{(jآƵi %T2fY?\3 fPHN4e,?׀+/q=:-GNg9`E]N's KKNu/A$y{OjQGJy!GJA*x(ѮwA[ ' B%N-'7'v(k@n)7e }=P2P͆#OVJni<-Ri>wV]]?tfvY{XzăL)t{[Y[y> \>֡BD|AKQ1UͦQ?T7C}ak@X3|(ݳb8mڨP˃҄HRMi=IsͥKv>O;64&}~ ~3['GG?FwuD9 :O-\@1QѤ厲1O>Lht2?з "?%"/;3'52{XIPj`AdqH@b9ADGg)@4R9!t!`dod" ZhnBt8eֳf 9~2NMx:x?~wx~z[ӻ_G߁O㗣 [v"|_?i\W?>d0gC{=CJJs=zJ}c=.tfi=\jR :<உճ?O? 
p6.xqyś-{_y?M ` Ng 0&;=?, s2`MiڗcP/ͯWy⯁/i$U, ul={rs7Q>d0iїɬwv|N`4w0;^dftz)_M^^Vd4Nާ{%)4c_y;Lz;Pu{j{b" ҎAԀỊml2>?PMg0{L?cOgm-'pg'`M_LNa8ͨ1=qD3hp3.}Qzwdk?_ 3Zr4Og#h2 f'\ʍOqbR&wOg=~|e8^cY7,?F)QnAuO0f`0& Ad4HڔH2;8ͷ:>r@xxr=}9m3}]9"$]2e}\SVvgןSu=*_75tIE#MLFroEug0JI:PK0P w֩Bѭi%CsgGИdjK7:SX5Ax W 1,UG)Uп=JoV]V=Z[|RR{.D-Cӏ0f6Ua9nd\_V$QIKh *h ksڰ\wxM4AK6!p J4rZfsyI ‰㚋֖{&ъv@\E=TBHٚ۷ -pR Lu2 LZ* Pa|9%at lah,%& +- T 0(A ^{Bam|Y\Nm/G+=:hV ̱LQI&Ҡ7$%;A, " G¢'@TZ:9%_Øa8BC08` a` L\I@f(P><¬#0kそ7Gv>Lh6ŏ`oy9wXuʀЮx)%@`ܛJ[ ꮀp%@5`7vKT1fɮ9e`8fXawr0 ng a템ivQ;p@T.氂ξ96#f./]N:χvlkI`?-NV}uJPT"FίQ(mn9Ƅzwaqǟ"8C)]ؓoktJh2 7yub^W*չY`'QW4(-B2J*L7‹ qτFv Зl=.G~s:tb `gs`G[f7;_=Gw?`挊Wt\.YMagٰD)ÈTԞ`pW.+[MOx/Tx %j-߷qX\ߝaeŷmqb-{"iT Gذx 1$EB6Nu$FTj76զEDy6 B.Eph\>t86`V27չ3}؇i!zr=B1V.oORn\X2g@Z63؅`dgt7-O9uՐ%z\ƲB\Dz)d>>u]R \6SE%Zbkv0z?c* (6@s3nz Ov݇cn3;T@`}]wBLU~w& C秭6 +Z8G +Z۬hώqv4%5ɥnlJa4Jz(ly:ua癬t܌g9CZtnV=Zo}wb("j߽{d7͇SJ)"FoF׫MfrtȮJܧcgmٶ4`Q+o[\ܳQcYo4b֪QCj4uZaAWw%ޜG)CF̙VՊ3ZGQ?4RkJuvl 1. &]a *n Ze[l[EP]mS&ۻHn(mïXq]Wkz̚ʖᤕ6P="|maXvhy2>B>6 )}jIY˨4mmƩuPQ2IX-9W[>"0}vRbg;ϝُj~T JHZ6wT/WD#B}GlZq܌|xʹt܌D)u (#O; .%:Bt׍6[+ڸ64,}҂lIoBe^|z|yoJ+[l16pî{yJB"5^NL8>mLq!ר}wk=F.UmqZg-ZAଊ) Pz::HKZH>',nׂEj̙K/Zr erګ.~9rMHM검RZ(Jny5痺#{Vq+] D]QI ᨅ)UЫ71J61ք金(u|HjVUvU@Cև!ZKQ`I[%T|Xp0Vn(8 7a2GJF'Lkk5PX7`'4 ɔ,E0H"T0XmPM DqkNXZ=H:,vL8t5F+dbaJW̄ #h."119A P&=ܩAj9~ 'yk3#&tuub"e/ 108ID1Rmxɘ\2M,0CTM|#C G$$ 1#DHrIVif,U›(YrԾl@{c.du<{' bQ( BpidT!%%/EluՊuwrލYλ1y7f9泜 ɄKo :Јxq[aJ堄l2P'w:(W g C o- ;%!v־*™wPR< ?`@ 2T%XI jnUa\ 9M7[ImwB]&(!9 42ᬌK?A-ׄŌ0@sXECAt9yYؚ{@.9 N(NPfl %IR&!POWL8Oba\T:uޔh"J񛏯0Tl\dQ]JE鎛gV2L勋鴉 &ӛGӴ'jx GGE9a#NO$y9 `D  Y؟3Zj>B mKyIi owZ$]`0slpK7T 2M`*IZrQ2'??uA`dMWuA(]f4? wbЀ20}ٻ6$W>mLyG= lOm=OmyZK@RI(RXŪ!0|Ŭ/L0TaLGEkWRbt.IELAD.)Z*tZHCH\ iLP_˭i$<69*!5l=H"xC{5UbX \[<`)(K0.ÆB5O-w1eG,DQi_[LB5:(yMZNSE-Qj{¶?;L» ۟VEtF8աz:9X] 0"V\&uc W B6EiP\ĩ@ÖQEK(8%D:%xAJ3o]"!S,kB]񹇀;S)Hgx.-5S!kd xݻTDϗvh-^x]?r'h TlF%G@-DB;4EeKP&CIr?_˹/ɒ.s]|ӞL_}ǬRCy /ͥ6Mig@Bf)]Us:*w-Zn 0F)U._ MIw+?. ܫ4W}֦5EҬTTTZ1rߖca%khz܋ɼ0adtmWSm^ 5WytHlƣr&mSrO#5켙XG7+;Q^iKq~5 V oۭGt0M(<"Ĥ K*dΠ'0ը>H}/T{wt. rD@(Խ'ֆDP|Op+&F ޕ =60uq F#A|*І)#cf4 Z%-ۣK[v \GHPP-(@1%%;^1?m`1,Oȕ;Gxt_t=ڨtpalaXbѩ {;Shvgbh;s8TJl6|B˅~ ~%άy3`b;ԙzrWLPEyQ?돟''[\_P)+nH?f(K7ѦviSds ZupΧd6Zɟ ԨZK&KS%0"AF-R0`4 B+mmSl L L*n9zCrض| 6AYRm4g@ M<GcLe#wWXN{0h)UcJ/li9kג4jWz=l)? hpBB0 +/"FbcjX.UxB25Yګ‹'K{GYS[ tW#. 
;O /\ª^=(mUkۣgl@x[uZކ;}Z^6#O Mb6jÞԪ9 J=*(SKz-tbs]_w7{0h;UЍW$rtN\>T%;zmR%#Tf',ǗN_?pࣻcRpڂ*ZFtZxIlS&ԥΆvpJ<߾2৊km6[féQk~pKϫTp F=쳂q3VWMIvZ90&b;XUn,q+9Tp=FE_7)aԻAe=Z7UB|[겂ﰁi|=+F%w\]f' Ϻ=`?KvS)pEI_.r[oxATmbGP7U|9:X5Yt._0inwb avsS6ͧEs *bA:2ҧe=gr=m/oVh&~^Z$&ΣJa:@:-(Wᴺ0Crz /e#'w׺^U?ЩH >-ϩ"0;h;>yW[D2JZ3NP%/]pvd,$bK.kAdbbIQ\#i麦yJ)nfD㻿F:\+9+%t9]8Eh|u>w= .E'sB|*p@\a{ŭ`* &F;~}1po_:'odBjynjUFha4|TN S*D$Pm(W3L˓w.Z0^k8N[0"Lbi䫌!je>/|Ooz&winl):qXr3Ͽy_Mpq $)?t2xP3u ]Mۤ^rJ(uIS &)O{cR42j3 @vsnv| tعTB@S^U.HƉ|H CtTF EhIJ1Pu"ML v*ZÛvݡx}_rm L@HQX]8H4@mҲ ΠW dI"??#"H026S)( ̬}2\SI!(qd_( P|ېךeQ҄"h NJHJ '`Q&`ץ3Gf:2rmRUXbc "2`rh!.y7m!ѣʘITF)fX,oc$Β#Jͭ'}T3_A)MmrŠpK7KY$KMz{$R| > !$rŠRQ@i <;:iB*ev#M :~Xi5iQkK=+ 2wK6cJi@WҴ2\):tF@)=QCtaSXe0O+5JS.^t4Ch{>ik: noAR=gƣAS3[L89G|=Onz R m`/z ݻQ׷u-)+)_1 t;O\^Z5PRZt lugih J=#6WG+\nZ^Qg #ZtZ$'!i[ĮN*ε=jiPV(0% Ak Zy1b,c)P5JE]l\soEŢ굾lQ83[7KWaNC%hBQ$*Sx`*ʢqFyY8f^5.ﶢzDDxk&V-0yFUhةgxD:Bp)cLCȧ皡 ES$&i퓢FVz颞5^mEfx5F"@21-Ţ6x+Zh(/*t(@zX0ttA."*)b(1@C]\PbâwFt6z6}*#*mKM]&UGrR.qD۷xO>Bp*I?׃ד~g(/-= T-x%n,4s#7TFp̴D0B0g|P,Ccj8[ij%Pxl -MmMn@59s0kIJw)AvJ…p6`4-JX;0]P SiV3cxY7:ONUTXN(mO~ͦ@%l&}>`iհ^Afw$+>D."īR"Fvmu+x!BX =.;i!]ĊJ>>0Bn<^ { |{wo; _1dUs,%W,Oz v9$)8!e֊Gwη4윅YAgDN M4F*΁$gh 5}ΉB@@Zo3ZA)ߤ6#%ВKbH ˁJ% `"IQ-#TZ#ۉdd=5~įvy?KF>^ӿ$3sf]pT[IJbEUXVXHBrm%Snk7E4㧠b":mQG=cyվOzڭ EtKtL Q]Oî]$4iV<hNd8rZ%뇋Pd!?ДY߼{M4jG~wycoժJ,1ǿ){lLח.5f ozX͸LTNG.oͪ7~j~ߣ2&.=0sQ9f,hM7Hl.lXRdf!v=SuH\W!_;Y\1Ǯ}tΟ?2Erg.& uqlMݽSފRXߔ>oiK"Z7\053 vx!L1FroOa>->ǰ`>[Y_(ӔyBI&|g@ Y"}0 *)?x+Ŀa<'O3zEmg<ᰯ%n"-ދ{oj_~I(Iw>#MW8pHBCjU!u]e $u$\iG2juAu `=?UMz=q`}ua'_}èˣEMQu8p~^mtO~2.M,`8y??U׭G`kY*Zk,_ma7| v@٩ D1՟}S"Ȃęr86#:W Q2H'&5'_ EԊ"0zV_GMy虮ɍ;K>wr{M|Mg rF_QIq=-E`瞝 {F$]|}Oy!O4thu=9`õk'[]=,'sBJ 2iNZh*v5c^[/C;GG@6_+2n W,_vy m`/z%} 9p2 Nt-u(ۅ%Ihx)Cn H rVa 9zt~A-U\4m:HRd8Om@WjƎ9 n։dPs'+ !E:gJe766-|nsVv62ڿjg47iiXjzul5OLWW?=|I+2!7BUH2Vf8!< +cTO~7VfLΜh D}!E9C2 Z@ FZ3סW;zuW_^[բM?$PSoZ]xVpNV0K*;ox7{ jN~֫k8])2}i<1<"Z$I'Lp{!&)ȩ} ۫@']h@+spΣ`nI)mk+@]h d}T9$qD_M S4Ɂw:&`_/tȓtʁwbAĹ.#H1Έ5O3bO,L¤O\-o\ۆ!U*;-qPT[5 I WH*M-o)/C>!a;M!lOڑДso[Wd29c(|pA((Fmn1(r /y+]*ŰQחj2D?_.j'%qw.oGzWNݺ3߾s2@1{xoJ_cz7wcF6壿|\sK!o\t1ooH>bM6wnBZbGeԇnp~*pT <=h@oh@?&I>!1}A+~vZDp̄ ԶNI$ˠHKvrN$jϠ~P ')NĠ(cMC^b:[bcQt^wzqp>>dgSw~Y\Ynkҡ^c/=3jRʨPHrWs +e4{| JC0h_ȎhA:Ds4*b$Htɟ};bShgas@lZD/o"K|rd /eEPiTWkoV& îʱؕv}8vIӒ]Ϻ=V@V 1Ꙗ"@WXvɢ,k,u1ޒEϴ=,hwb^\j˨Z,2rKcCk(k;)k3dRedtkpUC9< ӽۡ" |pb:-q̇ۘ^ Iw1 D/݌f4s1mhR]Ԇf!/m zbb~ۜXC"LjmbPyD{8sgx7VK|ҙafh@G YxǗwNyމQ}d0ͫLN?W ǕY8w8;@aH4}Ȥó^>@2r(5jdѺЫIe&տږz+^ Ic(R͟czW7#U-@Kz*3_TZ<՟m˪@ - IQInsD8+hyIu7hJbR/<8*->_8YP;E%{~.}>,q5Ÿܻ_K$FA&*-LyPT0f_(^F>Bxw$ʽМZknpPJAM mX͵ycb !W{)UaHF+aY_EJ_,dK,yx1s < b/2˪_?ÚY:0bHEC;E|$,wH3m7cº?zޗxCft1hP1ھ}eG x oC!Ӳ|h1T^tq4_X!V}@VHSµD%8ʈ4h[/[\&E'ăWkA"L`zz=m6|FGz= ;0i$d2ANO#& Db1􀯳8|p 0Γ/X) e)jAtއ6w֑.){iFw>s }ýO$ORJ^ 6L #/DDIN+>W~=-k@N0고D@gAg4JPp4ܧpܒR (pāw(~;4p<^0 ,4Zmx|15 ]KdgGtY6Q>|E*8xMI}C\9x 6"dzvI<Å2 JA'Vm? U4րS]TPFm!h`|$ǡ>%Rc?6bԆvs 8y\ )1Q(ӡ\&R.-q]E z ;DZ0$C%&2a>J #=d|lNĐ-]"!_}(8ЂDtd vѦ-Id̯tӨvtqkX4v 28L02x<*J_`m_^Ci?q>J~ywװk/L&b|3 'kftݷ41+E B/p&嘘 PXʆNV(}ʅx[Ó:« ЗU׳Gdb] D=W*}neZFr*\*}ӉrLkж%1# $.iiZP/GZZVCZ.C@C쪔֊e[wͪڴX057B|u"JۘE9>÷bz2(>~l}u2_A o_cYB79"B@:#K 6Y-P$9>FCr(pV K3[e3>=}[ÌB . jØ vBlyRe8Nxt"ZLeҷ4z1ZPZ;}<_O]8_ <{\gvbqup9ۿá"px`?}5V4|jߜhW׷^:j#$?t_rs}}qo޾xn>]?ǟ5|{ɏ/o^q{qov(Rc^ŷsAfp,f]"q΍1d6ﲇ*0Jޟѯ}H\;{)D?t6 QvDY?$bl̏\;Z:?Fۏzzj8_e?" 
Jan 31 05:39:16 crc kubenswrapper[4712]: Trace[716157317]: ---"Objects listed" error: 13837ms (05:39:16.373)
Jan 31 05:39:16 crc kubenswrapper[4712]: Trace[716157317]: [13.837891581s] [13.837891581s] END
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.373585 4712 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.378277 4712 trace.go:236] Trace[1146985658]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (31-Jan-2026 05:39:02.637) (total time: 13740ms):
Jan 31 05:39:16 crc kubenswrapper[4712]: Trace[1146985658]: ---"Objects listed" error: 13740ms (05:39:16.378)
Jan 31 05:39:16 crc kubenswrapper[4712]: Trace[1146985658]: [13.740447695s] [13.740447695s] END
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.378322 4712 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.379711 4712 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.380715 4712 trace.go:236] Trace[550540329]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (31-Jan-2026 05:39:02.566) (total time: 13814ms):
Jan 31 05:39:16 crc kubenswrapper[4712]: Trace[550540329]: ---"Objects listed" error: 13813ms (05:39:16.380)
Jan 31 05:39:16 crc kubenswrapper[4712]: Trace[550540329]: [13.814190524s] [13.814190524s] END
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.380769 4712 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.384608 4712 trace.go:236] Trace[926538168]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (31-Jan-2026 05:39:03.208) (total time: 13175ms):
Jan 31 05:39:16 crc kubenswrapper[4712]: Trace[926538168]: ---"Objects listed" error: 13175ms (05:39:16.384)
Jan 31 05:39:16 crc kubenswrapper[4712]: Trace[926538168]: [13.175749962s] [13.175749962s] END
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.384698 4712 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.437046 4712 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:35768->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.437104 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:35768->192.168.126.11:17697: read: connection reset by peer"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.437472 4712 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.437536 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.437954 4712 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.438018 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.453380 4712 apiserver.go:52] "Watching apiserver"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.473564 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 11:43:02.123427128 +0000 UTC
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.476852 4712 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.477568 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.478137 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.478504 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.478842 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.478946 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.478989 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.479107 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.479156 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.479046 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.479266 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.484368 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.484368 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.484469 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.484526 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.486941 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.488127 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.491238 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.493073 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.493361 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.523279 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.537110 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.551260 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.553705 4712 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.562918 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.572366 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580693 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580729 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580747 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580765 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580785 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580804 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580827 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580843 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580860 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580874 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580889 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580904 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580942 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580958 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580972 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.580987 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581004 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 31 05:39:16 crc
kubenswrapper[4712]: I0131 05:39:16.581020 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581034 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581025 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581050 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581085 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581102 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581116 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581130 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581197 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581222 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581260 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581284 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581306 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581323 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581340 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581355 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581372 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581389 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581404 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581420 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581459 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581476 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581492 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581509 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581518 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581526 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581576 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581598 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581616 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581655 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581679 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581679 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581706 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581731 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581756 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581779 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581802 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581834 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581860 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581884 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581908 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581932 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: 
\"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581961 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581956 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.581985 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582009 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582031 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582053 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582076 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582100 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582122 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582145 4712 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582165 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582213 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582236 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582260 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582281 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582305 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582329 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582359 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582381 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 
05:39:16.582401 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582423 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582444 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582465 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582486 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582509 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582533 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582556 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582582 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582604 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582626 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582677 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582700 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582724 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582750 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582774 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582798 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582821 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582844 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582866 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582887 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582910 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582935 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582958 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582981 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583061 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583089 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583113 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583140 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583184 4712 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583211 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583234 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583259 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583283 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583308 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583331 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583358 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583381 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583402 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583428 4712 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583454 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583477 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583500 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583555 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583580 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583605 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583627 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583649 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583674 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: 
I0131 05:39:16.583698 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583722 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583747 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583771 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583793 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583817 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583840 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583864 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583888 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582045 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583911 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583936 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583960 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583985 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584011 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584035 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584057 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584081 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584105 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584193 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: 
\"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584220 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584244 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584276 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584299 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584320 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584342 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584364 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584385 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584408 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584431 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod 
\"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584454 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584477 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584500 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584522 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584768 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584801 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584827 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584851 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584874 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584897 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584921 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584943 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584966 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584989 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585015 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585038 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585061 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585084 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585107 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585129 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585152 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585193 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585216 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585238 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585259 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585281 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585304 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585330 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585352 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585376 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585400 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585424 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585448 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585470 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585495 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585519 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585542 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585563 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585584 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585605 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585627 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585653 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585678 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585742 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585777 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585808 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585836 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585876 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585902 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585929 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585992 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586019 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586045 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586071 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586095 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586114 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586138 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: 
\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586211 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586228 4712 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586242 4712 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586256 4712 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586268 4712 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.587286 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582198 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593441 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.594670 4712 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583908 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583915 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582475 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.597905 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582505 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582553 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582754 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582904 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583038 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583045 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583059 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583216 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583236 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583245 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583261 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583259 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583279 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583309 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583380 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583387 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583406 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583598 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583635 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583679 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583689 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583697 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583709 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583746 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583886 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583903 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.582095 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.583936 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584092 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584334 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.584988 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585042 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585139 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585290 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585351 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585667 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585898 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585921 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.585958 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586314 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586603 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586618 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586667 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586828 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586851 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.586893 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.587086 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.587119 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.587204 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.587245 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.587360 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.587548 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.587820 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.588066 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.588190 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.588306 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.588432 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.588437 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.588605 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.588692 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). 
InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.588730 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.589284 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.589287 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.589474 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.589488 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.589503 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.589826 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.589887 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.589994 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.590031 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.590113 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.590277 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.590272 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.590295 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.590325 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.590916 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.591132 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.591145 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.591155 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.591354 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.591401 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:39:17.091381544 +0000 UTC m=+23.185263385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.591401 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.591452 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.591921 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.591918 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.591938 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.592015 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.592349 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.592805 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.592839 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.592854 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.592949 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593034 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593063 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593236 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593264 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.592826 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593645 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593672 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593735 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593733 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593791 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.593804 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.593873 4712 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.594029 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.594501 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.594585 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.594618 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.594880 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.594927 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.594921 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.594942 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.595039 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.595107 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.595114 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.595217 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.595261 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.595286 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.595310 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.595449 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.595661 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.595811 4712 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.596253 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.596671 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.596788 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.597009 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.597379 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.597430 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.597556 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.597632 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.597724 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.597867 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.597950 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.598118 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.598242 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.598131 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.598797 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.598799 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.598813 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599051 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599124 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599211 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599234 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.599264 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:17.099240568 +0000 UTC m=+23.193122499 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599448 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.599473 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:17.099461054 +0000 UTC m=+23.193342955 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599646 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599654 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599718 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599744 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599774 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.599777 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.600058 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.600063 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.600164 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.600290 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.600429 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.600502 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.600656 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.600665 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.601466 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.601589 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.601741 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.601996 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.602000 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.602218 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.602268 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.602441 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.602447 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.602526 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.602530 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.602594 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.602679 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.609465 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.609592 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.609903 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.609996 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.610166 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.610833 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.611078 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.611264 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.611444 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.611432 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.611574 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.613994 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.619472 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.619875 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.620401 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.621245 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.621163 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.621327 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.621266 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.621340 4712 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.621406 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:17.121388364 +0000 UTC m=+23.215270205 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.621775 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.622310 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.626890 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.628737 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.628793 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.629875 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.630551 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.630579 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.630593 4712 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.630652 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:17.130631945 +0000 UTC m=+23.224513786 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.635336 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312" exitCode=255 Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.635382 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312"} Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.635436 4712 scope.go:117] "RemoveContainer" containerID="7a64622a4e9639316cfca1770d2ba9e9bdd835107d752668612c2d0a1451b639" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.639944 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.649262 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.649341 4712 scope.go:117] "RemoveContainer" containerID="ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.649623 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:16 crc kubenswrapper[4712]: E0131 05:39:16.649657 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.652325 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.653441 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.659887 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.669626 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.679211 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687231 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687427 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687526 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687630 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687693 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687753 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687812 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687866 4712 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687926 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687980 4712 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688031 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688104 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688163 4712 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688278 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688350 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688407 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688463 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688520 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688641 4712 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688711 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688764 4712 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688833 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688885 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688898 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.688941 4712 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689052 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689069 4712 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689080 4712 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689090 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689099 4712 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689109 4712 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689119 4712 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.687356 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 31 
05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689130 4712 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689142 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689154 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689163 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689191 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689200 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689209 4712 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689218 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689226 4712 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689235 4712 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689245 4712 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689255 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689264 4712 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc 
kubenswrapper[4712]: I0131 05:39:16.689274 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689284 4712 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689294 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689305 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689317 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689330 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689343 4712 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689354 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689363 4712 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689372 4712 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689383 4712 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689394 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689405 4712 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689440 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689451 4712 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689464 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689487 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689496 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689506 4712 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689517 4712 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689528 4712 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689541 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689552 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689563 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689574 4712 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689586 4712 reconciler_common.go:293] "Volume detached for volume 
\"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689602 4712 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689613 4712 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689623 4712 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689633 4712 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689647 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689659 4712 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689672 4712 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689683 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689694 4712 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689705 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689716 4712 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689727 4712 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689739 4712 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689750 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689761 4712 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689789 4712 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689800 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689808 4712 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689817 4712 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689825 4712 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689833 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689840 4712 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689848 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689856 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689864 4712 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689872 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689884 4712 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689895 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689908 4712 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689917 4712 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689929 4712 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689939 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689949 4712 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689958 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689968 4712 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689978 4712 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689988 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.689999 4712 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690008 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690018 4712 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690028 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690039 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690056 4712 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690067 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690078 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690088 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690098 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690109 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690121 4712 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690133 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690146 4712 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690158 4712 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690200 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690211 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690222 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690233 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690244 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690254 4712 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690265 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690276 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690286 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690298 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690310 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690321 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690331 4712 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690342 4712 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690352 4712 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690362 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690372 4712 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690383 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690394 4712 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690405 4712 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690415 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690425 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690436 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690448 4712 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690459 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690470 4712 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690480 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690490 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690501 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690510 4712 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690521 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690627 4712 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690640 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690651 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690662 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690673 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690683 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690693 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690703 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690720 4712 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690732 4712 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690743 4712 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690753 4712 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690764 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690773 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690784 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690794 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690804 4712 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690814 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690825 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690835 4712 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690845 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690855 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690865 4712 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690876 4712 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690886 4712 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690896 4712 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690906 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690916 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690926 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690936 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690948 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690959 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690970 4712 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690982 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.690994 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.691007 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.691018 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.691028 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.691037 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.697536 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.797700 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.806357 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 31 05:39:16 crc kubenswrapper[4712]: W0131 05:39:16.808768 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-45025de1ff53882d3c8b12ee5fe06dc8e75dad1c6b83a4c0efe352b5937d7915 WatchSource:0}: Error finding container 45025de1ff53882d3c8b12ee5fe06dc8e75dad1c6b83a4c0efe352b5937d7915: Status 404 returned error can't find the container with id 45025de1ff53882d3c8b12ee5fe06dc8e75dad1c6b83a4c0efe352b5937d7915 Jan 31 05:39:16 crc kubenswrapper[4712]: W0131 05:39:16.816985 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-e292c650d51220095907b600d039baa62a11aa2d979807256f431c66f5ec1e91 WatchSource:0}: Error finding container e292c650d51220095907b600d039baa62a11aa2d979807256f431c66f5ec1e91: Status 404 returned error can't find the container with id e292c650d51220095907b600d039baa62a11aa2d979807256f431c66f5ec1e91 Jan 31 05:39:16 crc kubenswrapper[4712]: I0131 05:39:16.817667 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 31 05:39:16 crc kubenswrapper[4712]: W0131 05:39:16.831068 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-9ed0143dcbf2549155100ed1021e5ec8f2eb0fbc68e14198d0dc5aa9ef9cf8cf WatchSource:0}: Error finding container 9ed0143dcbf2549155100ed1021e5ec8f2eb0fbc68e14198d0dc5aa9ef9cf8cf: Status 404 returned error can't find the container with id 9ed0143dcbf2549155100ed1021e5ec8f2eb0fbc68e14198d0dc5aa9ef9cf8cf Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.094387 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.094591 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:39:18.09456138 +0000 UTC m=+24.188443221 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.195095 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.195142 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.195213 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.195257 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " 
pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195344 4712 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195378 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195389 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195440 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195456 4712 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195396 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195496 4712 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195490 4712 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195441 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:18.195421313 +0000 UTC m=+24.289303244 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195658 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:18.195634918 +0000 UTC m=+24.289516859 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195678 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:18.195669049 +0000 UTC m=+24.289551010 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.195730 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:18.19571794 +0000 UTC m=+24.289599901 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.316639 4712 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.318214 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.318285 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.318297 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.318371 4712 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.329201 4712 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.329480 4712 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.330700 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.330749 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.330761 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.330778 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.330795 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.350023 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.358496 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.358541 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.358552 4712 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.358567 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.358576 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.371745 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.376621 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.376663 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.376673 4712 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.376688 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.376700 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.397406 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.403204 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.403284 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.403304 4712 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.403338 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.403358 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.447482 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.447539 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.447552 4712
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.447575 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.447589 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.464971 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.466697 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.466749 4712
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.466761 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.466780 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.466793 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.473907 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 05:18:39.456290524 +0000 UTC Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.509245 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.513357 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.523924 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a64622a4e9639316cfca1770d2ba9e9bdd835107d752668612c2d0a1451b639\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:38:58Z\\\",\\\"message\\\":\\\"W0131 05:38:57.866352 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0131 
05:38:57.866967 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769837937 cert, and key in /tmp/serving-cert-3746916109/serving-signer.crt, /tmp/serving-cert-3746916109/serving-signer.key\\\\nI0131 05:38:58.400608 1 observer_polling.go:159] Starting file observer\\\\nW0131 05:38:58.403643 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0131 05:38:58.403796 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:38:58.404445 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3746916109/tls.crt::/tmp/serving-cert-3746916109/tls.key\\\\\\\"\\\\nF0131 05:38:58.643581 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.524409 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.544350 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.558829 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.568981 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.569030 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.569043 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.569059 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.569069 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.577863 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.595301 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.615745 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.633236 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.638641 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"e292c650d51220095907b600d039baa62a11aa2d979807256f431c66f5ec1e91"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.641816 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-zg9rz"] Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.642147 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-zg9rz" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.643157 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.643236 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"45025de1ff53882d3c8b12ee5fe06dc8e75dad1c6b83a4c0efe352b5937d7915"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.644210 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.644432 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.646072 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.646631 4712 scope.go:117] "RemoveContainer" containerID="ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312" Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.646812 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 31 05:39:17 
crc kubenswrapper[4712]: I0131 05:39:17.646877 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.647705 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.647751 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.647770 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9ed0143dcbf2549155100ed1021e5ec8f2eb0fbc68e14198d0dc5aa9ef9cf8cf"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.651710 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: E0131 05:39:17.664071 4712 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.670052 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.671536 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.671563 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.671572 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.671585 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.671594 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.695806 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a64622a4e9639316cfca1770d2ba9e9bdd835107d752668612c2d0a1451b639\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:38:58Z\\\",\\\"message\\\":\\\"W0131 05:38:57.866352 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0131 05:38:57.866967 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769837937 cert, and key in /tmp/serving-cert-3746916109/serving-signer.crt, /tmp/serving-cert-3746916109/serving-signer.key\\\\nI0131 05:38:58.400608 1 observer_polling.go:159] Starting file observer\\\\nW0131 05:38:58.403643 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0131 05:38:58.403796 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:38:58.404445 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3746916109/tls.crt::/tmp/serving-cert-3746916109/tls.key\\\\\\\"\\\\nF0131 05:38:58.643581 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 
maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.700060 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4bzd\" (UniqueName: \"kubernetes.io/projected/83251fc5-49c0-48ed-b6a1-debf4fb30255-kube-api-access-z4bzd\") pod \"node-resolver-zg9rz\" (UID: \"83251fc5-49c0-48ed-b6a1-debf4fb30255\") " pod="openshift-dns/node-resolver-zg9rz" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.700106 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/83251fc5-49c0-48ed-b6a1-debf4fb30255-hosts-file\") pod \"node-resolver-zg9rz\" (UID: \"83251fc5-49c0-48ed-b6a1-debf4fb30255\") " pod="openshift-dns/node-resolver-zg9rz" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.716878 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.734809 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.775025 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.775066 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.775078 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.775094 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.775104 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.800669 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4bzd\" (UniqueName: \"kubernetes.io/projected/83251fc5-49c0-48ed-b6a1-debf4fb30255-kube-api-access-z4bzd\") pod \"node-resolver-zg9rz\" (UID: \"83251fc5-49c0-48ed-b6a1-debf4fb30255\") " pod="openshift-dns/node-resolver-zg9rz" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.800727 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/83251fc5-49c0-48ed-b6a1-debf4fb30255-hosts-file\") pod \"node-resolver-zg9rz\" (UID: \"83251fc5-49c0-48ed-b6a1-debf4fb30255\") " pod="openshift-dns/node-resolver-zg9rz" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.800823 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/83251fc5-49c0-48ed-b6a1-debf4fb30255-hosts-file\") pod \"node-resolver-zg9rz\" (UID: \"83251fc5-49c0-48ed-b6a1-debf4fb30255\") " pod="openshift-dns/node-resolver-zg9rz" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.805925 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCo
unt\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.856794 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.861822 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4bzd\" (UniqueName: \"kubernetes.io/projected/83251fc5-49c0-48ed-b6a1-debf4fb30255-kube-api-access-z4bzd\") pod \"node-resolver-zg9rz\" (UID: \"83251fc5-49c0-48ed-b6a1-debf4fb30255\") " pod="openshift-dns/node-resolver-zg9rz" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.880288 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.880322 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.880332 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.880347 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.880357 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.907029 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.937533 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.952004 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-zg9rz" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.982756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.982780 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.982789 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.982801 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:17 crc kubenswrapper[4712]: I0131 05:39:17.982810 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:17Z","lastTransitionTime":"2026-01-31T05:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.004052 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.016483 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.027686 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
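
The ContainerStatusUnknown entries above record exitCode 137 for containers the runtime could no longer locate after pod deletion. That value is the shell convention for death by signal, 128 plus the signal number, so SIGKILL (9) surfaces as 137. A one-liner to confirm the arithmetic:

package main

import (
	"fmt"
	"syscall"
)

func main() {
	// 128 + SIGKILL(9) = 137, the exitCode in the statuses above.
	fmt.Println(128 + int(syscall.SIGKILL))
}
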
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.043393 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.056077 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
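
Note the circularity behind every "failed calling webhook ... connection refused" above: the pod.network-node-identity.openshift.io admission webhook is served by the network-node-identity pod on this same node, so status patches for every pod (including the webhook's own) are rejected while that container is still coming up. A standalone Go probe of the endpoint, purely illustrative: the URL and 10s timeout are taken from the log, certificate verification is skipped because this is a reachability check, and none of it is the apiserver's code.

package main

import (
	"bytes"
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 10 * time.Second, // mirrors the ?timeout=10s in the log
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Post("https://127.0.0.1:9743/pod?timeout=10s",
		"application/json", bytes.NewReader([]byte(`{}`)))
	if err != nil {
		// While nothing listens on 9743 this fails with
		// "dial tcp 127.0.0.1:9743: connect: connection refused".
		fmt.Println("webhook unreachable:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("webhook status:", resp.Status)
}
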
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.075857 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-6hwmd"] Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.076080 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.076206 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: W0131 05:39:18.079054 4712 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.079099 4712 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 31 05:39:18 crc kubenswrapper[4712]: W0131 05:39:18.080595 4712 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.080636 4712 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 31 05:39:18 crc kubenswrapper[4712]: W0131 05:39:18.082218 4712 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.082245 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.082260 4712 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 31 05:39:18 crc kubenswrapper[4712]: W0131 05:39:18.082273 4712 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.082295 4712 reflector.go:158] "Unhandled Error" 
err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.084653 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.084690 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.084700 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.084713 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.084722 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:18Z","lastTransitionTime":"2026-01-31T05:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.094978 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
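
The "forbidden ... no relationship found between node 'crc' and this object" errors come from the node authorizer: a kubelet may not list or watch secrets and configmaps wholesale, it may only fetch objects the authorizer can tie to a pod bound to that node, and machine-config-daemon-6hwmd has only just been ADDed. A hedged client-go sketch of the targeted Get that is eventually permitted (names are taken from the log; this assumes it runs in-cluster with kubelet-equivalent credentials and is not part of the kubelet itself):

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// A targeted Get can be authorized once the pod referencing the
	// secret is bound to the node; a List of all secrets never is.
	s, err := cs.CoreV1().Secrets("openshift-machine-config-operator").
		Get(context.TODO(), "proxy-tls", metav1.GetOptions{})
	if err != nil {
		fmt.Println("get secret:", err)
		return
	}
	fmt.Println("secret", s.Name, "has", len(s.Data), "keys")
}
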
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.106200 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is 
after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.109956 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.110088 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:39:20.110063448 +0000 UTC m=+26.203945279 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.120983 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
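
From this point the webhook failures change shape: the endpoint now answers, but its serving certificate expired on 2025-08-24 while the node clock reads 2026-01-31, so the TLS handshake fails instead of the TCP dial. This is the classic symptom of resuming a CRC VM long after its certificates were minted. A small Go check of the same validity window (the cert path is the webhook-cert mount seen in the pod spec above; this is an illustration, not OpenShift's cert tooling):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		fmt.Println(err)
		return
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Println("no PEM block found")
		return
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println(err)
		return
	}
	// The verifier rejects the chain when now falls outside
	// [NotBefore, NotAfter] -- exactly the error in the log.
	now := time.Now()
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("cert invalid: valid %s..%s, now %s\n",
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.Format(time.RFC3339))
		return
	}
	fmt.Println("cert currently valid")
}
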
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.139360 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.161585 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.176417 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.186452 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.186484 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.186494 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.186511 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.186520 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:18Z","lastTransitionTime":"2026-01-31T05:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.194730 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.209988 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.210462 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:18 
crc kubenswrapper[4712]: I0131 05:39:18.210513 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.210542 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eaac0246-673f-4670-8b7b-c27ecaf0d847-proxy-tls\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.210568 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqspq\" (UniqueName: \"kubernetes.io/projected/eaac0246-673f-4670-8b7b-c27ecaf0d847-kube-api-access-lqspq\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.210598 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.210622 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.210644 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eaac0246-673f-4670-8b7b-c27ecaf0d847-mcd-auth-proxy-config\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.210668 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/eaac0246-673f-4670-8b7b-c27ecaf0d847-rootfs\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210685 4712 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210723 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210767 4712 projected.go:288] 
Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210781 4712 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210788 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:20.210768338 +0000 UTC m=+26.304650259 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210794 4712 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210816 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:20.210803619 +0000 UTC m=+26.304685540 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210695 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210850 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:20.210833029 +0000 UTC m=+26.304714940 (durationBeforeRetry 2s). 
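
The "No retries permitted until ... (durationBeforeRetry 2s)" lines show the volume manager's per-operation exponential backoff: each failed MountVolume/UnmountVolume doubles the wait before the next attempt, up to a cap. A sketch of the doubling pattern; the initial step and cap below are assumptions for illustration, not the kubelet's exact constants.

package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the retry delay up to a cap, the pattern behind
// "durationBeforeRetry 2s" growing on repeated failures.
func nextBackoff(d time.Duration) time.Duration {
	const maxBackoff = 2 * time.Minute // assumed cap, for illustration
	if d <= 0 {
		return 500 * time.Millisecond // assumed initial step
	}
	if d *= 2; d > maxBackoff {
		return maxBackoff
	}
	return d
}

func main() {
	var d time.Duration
	for i := 0; i < 8; i++ {
		d = nextBackoff(d)
		fmt.Println(d) // 500ms, 1s, 2s, 4s, ... capped at 2m
	}
}
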
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210860 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210878 4712 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.210931 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:20.210908591 +0000 UTC m=+26.304790502 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.231259 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
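
The kube-api-access-* volumes failing above are projected volumes: each one assembles the service-account token, the kube-root-ca.crt ConfigMap and the namespace into a single mount, which is why setup cannot proceed while "kube-root-ca.crt" and "openshift-service-ca.crt" are not yet registered in the kubelet's object cache. A sketch of that volume's shape using the upstream API types (field values are illustrative, not copied from the cluster):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	expiry := int64(3607) // conventional token lifetime for these mounts
	vol := corev1.Volume{
		Name: "kube-api-access-cqllr",
		VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{
					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						Path:              "token",
						ExpirationSeconds: &expiry,
					}},
					{ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
						Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}},
					}},
				},
			},
		},
	}
	// All sources must resolve before the volume mounts; one missing
	// ConfigMap blocks the whole projected volume, as in the log.
	fmt.Println("projected volume:", vol.Name)
}
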
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.244491 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is 
after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.258423 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 
05:39:18.275733 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.288428 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.288479 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.288492 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.288513 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.288527 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:18Z","lastTransitionTime":"2026-01-31T05:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.311732 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/eaac0246-673f-4670-8b7b-c27ecaf0d847-rootfs\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.311791 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eaac0246-673f-4670-8b7b-c27ecaf0d847-proxy-tls\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.311809 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqspq\" (UniqueName: \"kubernetes.io/projected/eaac0246-673f-4670-8b7b-c27ecaf0d847-kube-api-access-lqspq\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.311843 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eaac0246-673f-4670-8b7b-c27ecaf0d847-mcd-auth-proxy-config\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.311858 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/eaac0246-673f-4670-8b7b-c27ecaf0d847-rootfs\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.392053 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.392091 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.392100 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.392122 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.392131 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:18Z","lastTransitionTime":"2026-01-31T05:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.474875 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 03:07:26.030154579 +0000 UTC Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.477315 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-zbfp7"] Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.477604 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.486831 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.497101 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.497227 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.497247 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.497112 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.497783 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-sn2n4"] Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.507240 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.507413 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.507431 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.507455 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.507470 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:18Z","lastTransitionTime":"2026-01-31T05:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.498726 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.509108 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.509226 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.509305 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.509324 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.509372 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.509553 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.515419 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.515918 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.517043 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.517245 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.517301 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.517638 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.518538 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.519060 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.519640 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.520563 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.521122 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.522780 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.523353 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.524374 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.524814 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.525302 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.526119 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.526613 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.527483 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.527830 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.528357 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.529368 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.529775 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.530716 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.531109 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.532195 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.532617 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.533192 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.534389 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.534851 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.535769 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.536213 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.536993 4712 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.537092 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.538614 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.539449 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" 
path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.539828 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.541419 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.542010 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.544011 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.544692 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.549051 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.549606 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.550408 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.550610 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.551316 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.551938 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.552479 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.553029 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.553574 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.554371 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.554874 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.557443 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.557936 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.558505 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.560221 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.560684 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.579803 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.598714 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.609958 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.610021 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.610035 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.610056 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.610093 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:18Z","lastTransitionTime":"2026-01-31T05:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.613980 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/36a43db9-04c0-48fb-8ee1-6e77c26672b3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614023 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-multus-conf-dir\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614043 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-var-lib-cni-multus\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614061 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwh7w\" (UniqueName: \"kubernetes.io/projected/36a43db9-04c0-48fb-8ee1-6e77c26672b3-kube-api-access-lwh7w\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614210 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-cnibin\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614228 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-var-lib-kubelet\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614242 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-hostroot\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614256 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-run-k8s-cni-cncf-io\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614283 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-system-cni-dir\") pod \"multus-zbfp7\" (UID: 
\"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614298 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f4943935-d884-4777-b679-bfabc7235a23-cni-binary-copy\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614317 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-multus-socket-dir-parent\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614335 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/36a43db9-04c0-48fb-8ee1-6e77c26672b3-cni-binary-copy\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614424 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-etc-kubernetes\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614443 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614459 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f4943935-d884-4777-b679-bfabc7235a23-multus-daemon-config\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614475 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-run-multus-certs\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614507 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-cnibin\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614574 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-multus-cni-dir\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614595 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-os-release\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614611 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-os-release\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614672 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-var-lib-cni-bin\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614691 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52btf\" (UniqueName: \"kubernetes.io/projected/f4943935-d884-4777-b679-bfabc7235a23-kube-api-access-52btf\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614743 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-system-cni-dir\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.614809 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-run-netns\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.616279 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.631358 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.651876 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-zg9rz" event={"ID":"83251fc5-49c0-48ed-b6a1-debf4fb30255","Type":"ContainerStarted","Data":"4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.651956 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-zg9rz" event={"ID":"83251fc5-49c0-48ed-b6a1-debf4fb30255","Type":"ContainerStarted","Data":"3bc874a4c147c9d074462bb8e3a1e154e18d6e37d48978503eae908af726909c"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.653520 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.688504 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.712438 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.713604 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.713668 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.713680 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.713706 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.713727 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:18Z","lastTransitionTime":"2026-01-31T05:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716060 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-cnibin\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716096 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-run-k8s-cni-cncf-io\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716115 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-var-lib-kubelet\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716132 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-hostroot\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716148 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-system-cni-dir\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716164 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f4943935-d884-4777-b679-bfabc7235a23-cni-binary-copy\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716199 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-multus-socket-dir-parent\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716220 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/36a43db9-04c0-48fb-8ee1-6e77c26672b3-cni-binary-copy\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716243 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716260 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-etc-kubernetes\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716268 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-var-lib-kubelet\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716283 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f4943935-d884-4777-b679-bfabc7235a23-multus-daemon-config\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716346 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-run-multus-certs\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716353 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-cnibin\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716401 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-cnibin\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716454 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-run-multus-certs\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716380 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-cnibin\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716479 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-run-k8s-cni-cncf-io\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716577 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-multus-cni-dir\") pod \"multus-zbfp7\" (UID: 
\"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716583 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-system-cni-dir\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716618 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-os-release\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716640 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-os-release\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716945 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-multus-cni-dir\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.716977 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-multus-socket-dir-parent\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717017 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-hostroot\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717050 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-os-release\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717086 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-os-release\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717138 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-run-netns\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717160 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" 
(UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-var-lib-cni-bin\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717202 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52btf\" (UniqueName: \"kubernetes.io/projected/f4943935-d884-4777-b679-bfabc7235a23-kube-api-access-52btf\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717220 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-system-cni-dir\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717235 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-multus-conf-dir\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717271 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/36a43db9-04c0-48fb-8ee1-6e77c26672b3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717305 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-var-lib-cni-multus\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717331 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwh7w\" (UniqueName: \"kubernetes.io/projected/36a43db9-04c0-48fb-8ee1-6e77c26672b3-kube-api-access-lwh7w\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717365 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f4943935-d884-4777-b679-bfabc7235a23-multus-daemon-config\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717567 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-system-cni-dir\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717590 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-var-lib-cni-bin\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717631 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-multus-conf-dir\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717665 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-run-netns\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717677 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-host-var-lib-cni-multus\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.717729 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f4943935-d884-4777-b679-bfabc7235a23-etc-kubernetes\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.718021 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/36a43db9-04c0-48fb-8ee1-6e77c26672b3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.718204 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/36a43db9-04c0-48fb-8ee1-6e77c26672b3-cni-binary-copy\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.718260 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/36a43db9-04c0-48fb-8ee1-6e77c26672b3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.718585 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f4943935-d884-4777-b679-bfabc7235a23-cni-binary-copy\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.729678 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.737822 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwh7w\" (UniqueName: \"kubernetes.io/projected/36a43db9-04c0-48fb-8ee1-6e77c26672b3-kube-api-access-lwh7w\") pod \"multus-additional-cni-plugins-sn2n4\" (UID: \"36a43db9-04c0-48fb-8ee1-6e77c26672b3\") " pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.740700 
4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52btf\" (UniqueName: \"kubernetes.io/projected/f4943935-d884-4777-b679-bfabc7235a23-kube-api-access-52btf\") pod \"multus-zbfp7\" (UID: \"f4943935-d884-4777-b679-bfabc7235a23\") " pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.748193 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.754140 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.755064 4712 scope.go:117] "RemoveContainer" containerID="ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312" Jan 31 05:39:18 crc kubenswrapper[4712]: E0131 05:39:18.755259 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 31 
05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.762325 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.777591 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.796351 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.809229 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.809459 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-zbfp7" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.816487 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.816523 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.816532 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.816548 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.816558 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:18Z","lastTransitionTime":"2026-01-31T05:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.821431 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.826595 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: W0131 05:39:18.831487 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4943935_d884_4777_b679_bfabc7235a23.slice/crio-0a905eda4a3da7e7c261028a3f8d684439fb1a5d80bb6c97d510a562e34a67b4 WatchSource:0}: Error finding container 0a905eda4a3da7e7c261028a3f8d684439fb1a5d80bb6c97d510a562e34a67b4: Status 404 returned error can't find the container with id 0a905eda4a3da7e7c261028a3f8d684439fb1a5d80bb6c97d510a562e34a67b4 Jan 31 05:39:18 crc kubenswrapper[4712]: W0131 05:39:18.838841 4712 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36a43db9_04c0_48fb_8ee1_6e77c26672b3.slice/crio-4968e319bdbf2d135294397a734d012a6f48a4ef1a80993fd5212833e7906e41 WatchSource:0}: Error finding container 4968e319bdbf2d135294397a734d012a6f48a4ef1a80993fd5212833e7906e41: Status 404 returned error can't find the container with id 4968e319bdbf2d135294397a734d012a6f48a4ef1a80993fd5212833e7906e41 Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.846932 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/opens
hift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.860825 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.880945 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.896531 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.903686 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6r6bn"] Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.906375 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.909518 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.909609 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.909795 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.911946 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.911943 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.912130 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.914486 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.920447 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.920497 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.920510 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.920530 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.920542 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:18Z","lastTransitionTime":"2026-01-31T05:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.922364 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.936337 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.952058 4712 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.985948 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.986309 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 31 05:39:18 crc kubenswrapper[4712]: I0131 05:39:18.991447 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqspq\" (UniqueName: \"kubernetes.io/projected/eaac0246-673f-4670-8b7b-c27ecaf0d847-kube-api-access-lqspq\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.005940 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022525 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovn-node-metrics-cert\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022562 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-log-socket\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022580 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-systemd\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022609 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-systemd-units\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022624 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-script-lib\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022663 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-var-lib-openvswitch\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022689 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-node-log\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022703 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-bin\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022719 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-netd\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022733 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-config\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022751 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-ovn\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022769 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022912 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-env-overrides\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022975 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-kubelet\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.022999 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-etc-openvswitch\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023022 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-ovn-kubernetes\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023081 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-netns\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023114 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-slash\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023141 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023237 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023250 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023237 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-openvswitch\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023300 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xdhx\" (UniqueName: \"kubernetes.io/projected/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-kube-api-access-8xdhx\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023270 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.023384 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.048129 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.094617 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.123844 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-openvswitch\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.123895 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xdhx\" (UniqueName: \"kubernetes.io/projected/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-kube-api-access-8xdhx\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.123921 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovn-node-metrics-cert\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.123944 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-log-socket\") pod 
\"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.123969 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-systemd\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.123978 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-openvswitch\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124003 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-systemd-units\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124061 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-systemd-units\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124068 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-script-lib\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124122 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-var-lib-openvswitch\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124150 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-node-log\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124190 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-bin\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124210 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-netd\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" 
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124228 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-config\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124252 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-ovn\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124269 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124291 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-env-overrides\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124310 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-kubelet\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124326 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-etc-openvswitch\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124341 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-ovn-kubernetes\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124339 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-log-socket\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124371 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-netns\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124402 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-systemd\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124403 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-slash\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124427 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-slash\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124476 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124503 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-ovn\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124938 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-bin\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124957 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-etc-openvswitch\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.125000 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-env-overrides\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.125000 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-kubelet\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.125018 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-var-lib-openvswitch\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.125035 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-ovn-kubernetes\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.124956 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-node-log\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.125122 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-netns\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.125109 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-netd\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.125191 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-config\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.125660 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-script-lib\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.126280 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.126543 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.126561 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.126581 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.126595 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network
plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.131228 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovn-node-metrics-cert\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.132907 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.149194 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xdhx\" (UniqueName: \"kubernetes.io/projected/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-kube-api-access-8xdhx\") pod \"ovnkube-node-6r6bn\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.157449 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.173282 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.187436 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.198006 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.211654 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\
"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.222566 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.226594 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.228370 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.228396 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.228404 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.228419 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.228434 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: W0131 05:39:19.236593 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f522e2e_c0c8_44a7_b834_ac367dba0c9c.slice/crio-f1154e1c1699bba78ba81f65dec62030476ca9f07eddbc32e2d2650acba2eed6 WatchSource:0}: Error finding container f1154e1c1699bba78ba81f65dec62030476ca9f07eddbc32e2d2650acba2eed6: Status 404 returned error can't find the container with id f1154e1c1699bba78ba81f65dec62030476ca9f07eddbc32e2d2650acba2eed6 Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.252943 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.272066 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.291311 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: E0131 05:39:19.312359 4712 secret.go:188] Couldn't get secret openshift-machine-config-operator/proxy-tls: failed to sync secret cache: timed out waiting for the condition Jan 31 05:39:19 crc kubenswrapper[4712]: E0131 05:39:19.312443 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eaac0246-673f-4670-8b7b-c27ecaf0d847-proxy-tls podName:eaac0246-673f-4670-8b7b-c27ecaf0d847 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:19.812423488 +0000 UTC m=+25.906305329 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/eaac0246-673f-4670-8b7b-c27ecaf0d847-proxy-tls") pod "machine-config-daemon-6hwmd" (UID: "eaac0246-673f-4670-8b7b-c27ecaf0d847") : failed to sync secret cache: timed out waiting for the condition Jan 31 05:39:19 crc kubenswrapper[4712]: E0131 05:39:19.312673 4712 configmap.go:193] Couldn't get configMap openshift-machine-config-operator/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Jan 31 05:39:19 crc kubenswrapper[4712]: E0131 05:39:19.312708 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/eaac0246-673f-4670-8b7b-c27ecaf0d847-mcd-auth-proxy-config podName:eaac0246-673f-4670-8b7b-c27ecaf0d847 nodeName:}" failed. 
No retries permitted until 2026-01-31 05:39:19.812700355 +0000 UTC m=+25.906582186 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "mcd-auth-proxy-config" (UniqueName: "kubernetes.io/configmap/eaac0246-673f-4670-8b7b-c27ecaf0d847-mcd-auth-proxy-config") pod "machine-config-daemon-6hwmd" (UID: "eaac0246-673f-4670-8b7b-c27ecaf0d847") : failed to sync configmap cache: timed out waiting for the condition Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.333725 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.333758 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.333767 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.333782 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.333793 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.354723 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.367480 4712 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.382827 4712 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.404873 4712 csr.go:261] certificate signing request csr-vzjzf is approved, waiting to be issued Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.414228 4712 csr.go:257] certificate signing request csr-vzjzf is issued Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.436793 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.436824 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.436832 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.436845 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.436891 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.475203 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 17:21:37.042915019 +0000 UTC Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.483423 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.509923 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.539336 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.539366 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.539376 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.539389 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.539398 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.647527 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.647570 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.647579 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.647596 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.647606 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.655000 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9" exitCode=0 Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.655038 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.655080 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"f1154e1c1699bba78ba81f65dec62030476ca9f07eddbc32e2d2650acba2eed6"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.656597 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerStarted","Data":"a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.656667 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerStarted","Data":"4968e319bdbf2d135294397a734d012a6f48a4ef1a80993fd5212833e7906e41"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.658231 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zbfp7" event={"ID":"f4943935-d884-4777-b679-bfabc7235a23","Type":"ContainerStarted","Data":"0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.658262 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zbfp7" event={"ID":"f4943935-d884-4777-b679-bfabc7235a23","Type":"ContainerStarted","Data":"0a905eda4a3da7e7c261028a3f8d684439fb1a5d80bb6c97d510a562e34a67b4"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.675160 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.700894 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z 
is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.715098 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 
05:39:19.730000 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.743288 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.753859 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.753893 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.753901 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.753917 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.753926 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.765316 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.790265 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.808975 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.823964 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.832677 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eaac0246-673f-4670-8b7b-c27ecaf0d847-mcd-auth-proxy-config\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.832718 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eaac0246-673f-4670-8b7b-c27ecaf0d847-proxy-tls\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.833826 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eaac0246-673f-4670-8b7b-c27ecaf0d847-mcd-auth-proxy-config\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.835614 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eaac0246-673f-4670-8b7b-c27ecaf0d847-proxy-tls\") pod \"machine-config-daemon-6hwmd\" (UID: \"eaac0246-673f-4670-8b7b-c27ecaf0d847\") " pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.838937 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.851271 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.855686 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.855717 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.855726 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.855739 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.855748 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.860543 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.880466 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.887915 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.897204 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: W0131 05:39:19.898702 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeaac0246_673f_4670_8b7b_c27ecaf0d847.slice/crio-3a620a618c64a082a9111926f1e7d781bb2180a28ea4374f8cb480ee0bae98aa WatchSource:0}: Error finding container 3a620a618c64a082a9111926f1e7d781bb2180a28ea4374f8cb480ee0bae98aa: Status 404 returned error can't find the container with id 3a620a618c64a082a9111926f1e7d781bb2180a28ea4374f8cb480ee0bae98aa Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.914534 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.930280 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.949064 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.958004 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 
05:39:19.958062 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.958074 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.958095 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.958107 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:19Z","lastTransitionTime":"2026-01-31T05:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.973936 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:19 crc kubenswrapper[4712]: I0131 05:39:19.991979 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:19Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.024927 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.060632 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.060666 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.060675 4712 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.060689 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.060698 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.079222 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.129279 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.136249 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.136439 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:39:24.136363565 +0000 UTC m=+30.230245406 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.151213 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mo
untPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.162758 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.162801 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.162810 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.162825 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.162837 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.179741 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.199362 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z 
is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.210478 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 
05:39:20.237387 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.237443 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.237471 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.237497 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.237580 4712 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.237631 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:24.237614359 +0000 UTC m=+30.331496210 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238018 4712 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238052 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:24.23804265 +0000 UTC m=+30.331924491 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238113 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238128 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238139 4712 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238186 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:24.238157783 +0000 UTC m=+30.332039634 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238242 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238254 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238264 4712 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.238290 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:24.238282946 +0000 UTC m=+30.332164787 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.265295 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.265336 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.265347 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.265364 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.265375 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.367906 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.367943 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.367954 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.367972 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.367983 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.415628 4712 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-31 05:34:19 +0000 UTC, rotation deadline is 2026-11-29 23:06:13.235276478 +0000 UTC Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.415742 4712 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7265h26m52.819538657s for next certificate rotation Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.470608 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.470646 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.470656 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.470674 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.470690 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.475530 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 14:33:17.545857059 +0000 UTC Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.503595 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.503761 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.503601 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.503632 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.503915 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:20 crc kubenswrapper[4712]: E0131 05:39:20.504006 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.534573 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-65vvn"] Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.534917 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-65vvn" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.536915 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.537446 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.537761 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.539262 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.553560 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.570828 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.572796 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.572856 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.572870 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.572889 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.572902 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.587693 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.600821 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.613919 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.631340 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.641133 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv65h\" (UniqueName: \"kubernetes.io/projected/b4a1f052-e167-4d29-ba2d-82b193736f59-kube-api-access-dv65h\") pod \"node-ca-65vvn\" (UID: \"b4a1f052-e167-4d29-ba2d-82b193736f59\") " pod="openshift-image-registry/node-ca-65vvn" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.641214 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b4a1f052-e167-4d29-ba2d-82b193736f59-host\") pod \"node-ca-65vvn\" (UID: \"b4a1f052-e167-4d29-ba2d-82b193736f59\") " pod="openshift-image-registry/node-ca-65vvn" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.641241 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b4a1f052-e167-4d29-ba2d-82b193736f59-serviceca\") pod \"node-ca-65vvn\" (UID: \"b4a1f052-e167-4d29-ba2d-82b193736f59\") " pod="openshift-image-registry/node-ca-65vvn" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.650029 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.664718 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.664790 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.664830 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.664842 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.664850 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.664861 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.665883 4712 generic.go:334] "Generic (PLEG): container finished" podID="36a43db9-04c0-48fb-8ee1-6e77c26672b3" containerID="a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae" 
exitCode=0 Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.665960 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerDied","Data":"a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.667131 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.669551 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.669605 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.669617 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"3a620a618c64a082a9111926f1e7d781bb2180a28ea4374f8cb480ee0bae98aa"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.675410 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.675450 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.675460 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.675476 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.675486 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.678840 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7
b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.689628 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.704158 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.724594 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.740762 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.741992 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b4a1f052-e167-4d29-ba2d-82b193736f59-host\") pod \"node-ca-65vvn\" (UID: \"b4a1f052-e167-4d29-ba2d-82b193736f59\") " pod="openshift-image-registry/node-ca-65vvn"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.742054 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b4a1f052-e167-4d29-ba2d-82b193736f59-serviceca\") pod \"node-ca-65vvn\" (UID: \"b4a1f052-e167-4d29-ba2d-82b193736f59\") " pod="openshift-image-registry/node-ca-65vvn"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.742252 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv65h\" (UniqueName: \"kubernetes.io/projected/b4a1f052-e167-4d29-ba2d-82b193736f59-kube-api-access-dv65h\") pod \"node-ca-65vvn\" (UID: \"b4a1f052-e167-4d29-ba2d-82b193736f59\") " pod="openshift-image-registry/node-ca-65vvn"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.742586 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b4a1f052-e167-4d29-ba2d-82b193736f59-host\") pod \"node-ca-65vvn\" (UID: \"b4a1f052-e167-4d29-ba2d-82b193736f59\") " pod="openshift-image-registry/node-ca-65vvn"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.743728 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b4a1f052-e167-4d29-ba2d-82b193736f59-serviceca\") pod \"node-ca-65vvn\" (UID: \"b4a1f052-e167-4d29-ba2d-82b193736f59\") " pod="openshift-image-registry/node-ca-65vvn"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.755613 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.765979 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv65h\" (UniqueName: \"kubernetes.io/projected/b4a1f052-e167-4d29-ba2d-82b193736f59-kube-api-access-dv65h\") pod \"node-ca-65vvn\" (UID: \"b4a1f052-e167-4d29-ba2d-82b193736f59\") " pod="openshift-image-registry/node-ca-65vvn"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.773215 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.783908 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.783949 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.783960 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.783977 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.783987 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
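[editor's note] The KubeletNotReady condition above is the kubelet's runtime-network check: the node stays NotReady until a CNI config file shows up in /etc/kubernetes/cni/net.d/. A minimal Go sketch of that directory scan, for orientation only; this is not the kubelet/libcni source, and the path and extensions are taken from the log line itself:

```go
// Sketch of the check behind "no CNI configuration file in
// /etc/kubernetes/cni/net.d/": report whether the confdir holds
// at least one .conf, .conflist, or .json file.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig returns true if confDir contains any CNI config file.
func hasCNIConfig(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		// Mirrors the condition the kubelet keeps publishing above:
		// NetworkReady=false, reason NetworkPluginNotReady.
		fmt.Println("network plugin not ready: no CNI configuration file")
		return
	}
	fmt.Println("CNI configuration present")
}
```

Once ovnkube-node writes its config into that directory, this check flips and the Ready condition recovers; until then every sync loop re-emits the NodeNotReady events seen throughout this window.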
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.794100 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.808648 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.824659 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.840590 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
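[editor's note] Every "Failed to update status for pod" entry in this run fails identically: the pod.network-node-identity.openshift.io webhook's serving certificate has NotAfter 2025-08-24T17:21:41Z, while the node clock reads 2026-01-31, so the TLS handshake to https://127.0.0.1:9743 is rejected before any patch is applied. A minimal Go sketch of the x509 validity-window comparison that produces this message; it assumes the webhook certificate is available as a PEM file (a hypothetical path here), whereas in the handshake the check happens inside crypto/tls:

```go
// Sketch of the NotBefore/NotAfter check behind "certificate has
// expired or is not yet valid: current time ... is after ...".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func checkValidity(pemPath string, now time.Time) error {
	data, err := os.ReadFile(pemPath)
	if err != nil {
		return err
	}
	block, _ := pem.Decode(data)
	if block == nil {
		return fmt.Errorf("no PEM block in %s", pemPath)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		return err
	}
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		// Same shape as the log lines above: current time vs. NotAfter.
		return fmt.Errorf("certificate has expired or is not yet valid: current time %s is after %s",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	}
	return nil
}

func main() {
	// "webhook-cert.pem" is a placeholder; in the cluster the cert is
	// mounted into the network-node-identity pod at /etc/webhook-cert/.
	if err := checkValidity("webhook-cert.pem", time.Now()); err != nil {
		fmt.Println("tls: failed to verify certificate: x509:", err)
	}
}
```

Until that certificate is rotated, the kubelet's status_manager will keep retrying and re-logging the same failure for every pod on the node, which is why the entries below repeat with fresh timestamps.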
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.846899 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-65vvn"
Jan 31 05:39:20 crc kubenswrapper[4712]: W0131 05:39:20.858819 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4a1f052_e167_4d29_ba2d_82b193736f59.slice/crio-02172f2dc4eba529d807caf32d9f00a7638cff96628d7d58639bd4940a84679f WatchSource:0}: Error finding container 02172f2dc4eba529d807caf32d9f00a7638cff96628d7d58639bd4940a84679f: Status 404 returned error can't find the container with id 02172f2dc4eba529d807caf32d9f00a7638cff96628d7d58639bd4940a84679f
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.881960 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.886419 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.886442 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.886450 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.886465 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.886475 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.928825 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.969345 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:20Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.988696 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.988730 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.988738 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.988754 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:20 crc kubenswrapper[4712]: I0131 05:39:20.988765 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:20Z","lastTransitionTime":"2026-01-31T05:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.003119 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.086054 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.090829 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.090879 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.090889 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.090907 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 
31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.090917 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:21Z","lastTransitionTime":"2026-01-31T05:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.138910 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z 
is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.163502 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.193235 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.193270 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.193283 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.193297 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.193308 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:21Z","lastTransitionTime":"2026-01-31T05:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.202004 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.240577 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.299473 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.299513 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.299522 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.299536 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.299548 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:21Z","lastTransitionTime":"2026-01-31T05:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.401349 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.401384 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.401396 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.401410 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.401418 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:21Z","lastTransitionTime":"2026-01-31T05:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.476194 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 14:59:44.679135337 +0000 UTC Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.506750 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.506777 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.506785 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.506798 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.506806 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:21Z","lastTransitionTime":"2026-01-31T05:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.608736 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.608776 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.608787 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.608803 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.608815 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:21Z","lastTransitionTime":"2026-01-31T05:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.673999 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerStarted","Data":"a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.675496 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-65vvn" event={"ID":"b4a1f052-e167-4d29-ba2d-82b193736f59","Type":"ContainerStarted","Data":"dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.675531 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-65vvn" event={"ID":"b4a1f052-e167-4d29-ba2d-82b193736f59","Type":"ContainerStarted","Data":"02172f2dc4eba529d807caf32d9f00a7638cff96628d7d58639bd4940a84679f"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.688129 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.701412 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.711436 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.711481 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.711491 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.711508 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.711523 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:21Z","lastTransitionTime":"2026-01-31T05:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.722513 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.742818 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.763221 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.777776 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.789596 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.817354 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.818801 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.818852 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.818868 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.818887 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.818900 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:21Z","lastTransitionTime":"2026-01-31T05:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.829462 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.848569 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.866208 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.887146 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.920598 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.920631 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.920640 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.920654 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.920663 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:21Z","lastTransitionTime":"2026-01-31T05:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:21 crc kubenswrapper[4712]: I0131 05:39:21.943990 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.000073 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:21Z 
is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.027079 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.027142 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.027159 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.027235 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.027252 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.047653 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-
cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.070203 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.084043 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.101430 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.116191 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.128913 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.137025 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.137064 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.137073 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.137090 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.137101 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.148692 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.172779 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z 
is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.184278 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.197465 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.239822 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.239862 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.239874 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.239889 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.239899 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.243471 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.281890 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.322059 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.343568 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.343868 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.343960 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.344051 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.344123 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.366632 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entry
point\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.446697 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.446726 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.446734 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.446750 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.446758 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.477269 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 10:16:24.940074069 +0000 UTC Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.503647 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:22 crc kubenswrapper[4712]: E0131 05:39:22.503782 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.504222 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:22 crc kubenswrapper[4712]: E0131 05:39:22.504284 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.504397 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:22 crc kubenswrapper[4712]: E0131 05:39:22.504448 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.548904 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.548941 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.548951 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.548965 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.548973 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.651346 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.651388 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.651396 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.651412 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.651422 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.683162 4712 generic.go:334] "Generic (PLEG): container finished" podID="36a43db9-04c0-48fb-8ee1-6e77c26672b3" containerID="a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9" exitCode=0 Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.683257 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerDied","Data":"a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.697786 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.715774 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.731298 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.745731 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.753624 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.753842 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.753856 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.753873 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.753883 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.762863 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.777713 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\
":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.799636 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.815001 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de25971
26bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.829827 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.850851 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.856289 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.856329 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.856340 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.856359 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.856372 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.867214 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.878771 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.892080 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.920239 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:22Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.959117 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.959154 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.959179 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.959196 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:22 crc kubenswrapper[4712]: I0131 05:39:22.959207 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:22Z","lastTransitionTime":"2026-01-31T05:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.061342 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.061390 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.061401 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.061418 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.061429 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.164067 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.164110 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.164122 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.164157 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.164183 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.274417 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.274461 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.274472 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.274489 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.274500 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.377211 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.377556 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.377569 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.377587 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.377599 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.478487 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 12:36:59.951644404 +0000 UTC Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.480037 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.480077 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.480088 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.480105 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.480116 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.582746 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.582778 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.582795 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.582814 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.582826 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.685325 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.685456 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.685502 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.685533 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.685545 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.689732 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.691902 4712 generic.go:334] "Generic (PLEG): container finished" podID="36a43db9-04c0-48fb-8ee1-6e77c26672b3" containerID="275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694" exitCode=0 Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.691952 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerDied","Data":"275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.709862 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.726882 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.744142 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.757929 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.774613 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.788276 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.788305 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.788313 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.788325 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 
31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.788334 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.791705 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.810159 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.846880 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z 
is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.867126 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.886951 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.894262 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.894337 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.894349 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.894366 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.894376 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.918305 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.935529 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.949557 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.961687 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.996481 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.996527 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.996538 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.996554 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:23 crc kubenswrapper[4712]: I0131 05:39:23.996564 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:23Z","lastTransitionTime":"2026-01-31T05:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.099127 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.099209 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.099221 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.099256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.099270 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:24Z","lastTransitionTime":"2026-01-31T05:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.181719 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.181901 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:39:32.181879534 +0000 UTC m=+38.275761375 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.201259 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.201291 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.201299 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.201315 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.201326 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:24Z","lastTransitionTime":"2026-01-31T05:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.282367 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.282436 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.282473 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.282500 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282594 4712 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282594 4712 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282621 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282651 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282663 4712 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282621 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282663 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:32.282646465 +0000 UTC m=+38.376528306 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282715 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282726 4712 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282741 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:32.282724297 +0000 UTC m=+38.376606138 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282758 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:32.282750648 +0000 UTC m=+38.376632489 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.282768 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:32.282763438 +0000 UTC m=+38.376645279 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.302975 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.303015 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.303024 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.303039 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.303050 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:24Z","lastTransitionTime":"2026-01-31T05:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.333488 4712 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.405828 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.405861 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.405869 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.405882 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.405891 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:24Z","lastTransitionTime":"2026-01-31T05:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.479494 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 22:21:30.264623253 +0000 UTC Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.503118 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.503168 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.503275 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.503401 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.503513 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:24 crc kubenswrapper[4712]: E0131 05:39:24.503699 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.507930 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.508107 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.508267 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.509097 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.509137 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:24Z","lastTransitionTime":"2026-01-31T05:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.525365 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z 
is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.538273 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.549509 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.560615 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.579953 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.594671 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.611802 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.611839 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.611849 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.611865 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.611877 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:24Z","lastTransitionTime":"2026-01-31T05:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.612048 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.629054 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.641753 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.655962 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.668524 
4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.680511 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.694310 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.698853 4712 generic.go:334] "Generic (PLEG): container finished" podID="36a43db9-04c0-48fb-8ee1-6e77c26672b3" containerID="f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c" exitCode=0 Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.698881 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerDied","Data":"f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c"} Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.711843 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.713788 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.713841 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.713855 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.713877 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.713891 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:24Z","lastTransitionTime":"2026-01-31T05:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.728352 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerI
D\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.741860 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.754394 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.768403 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.782486 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.797564 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.808423 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.816898 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.816974 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.816997 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.817050 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.817075 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:24Z","lastTransitionTime":"2026-01-31T05:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.821987 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":
\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.840479 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.856124 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.871746 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.902584 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z 
is after 2025-08-24T17:21:41Z" Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.916012 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.919971 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.920019 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.920033 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.920051 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.920063 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:24Z","lastTransitionTime":"2026-01-31T05:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:24 crc kubenswrapper[4712]: I0131 05:39:24.928431 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:24Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.023007 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.023045 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.023059 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.023077 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.023090 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.126471 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.126900 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.126911 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.126926 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.126935 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.229484 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.229520 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.229532 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.229550 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.229562 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.333560 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.333594 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.333604 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.333637 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.333646 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.446331 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.446382 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.446393 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.446411 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.446745 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.480637 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 14:34:01.917970143 +0000 UTC Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.549269 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.549310 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.549319 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.549334 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.549344 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.651686 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.651743 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.651755 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.651774 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.651787 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.704342 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c"} Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.704542 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.707414 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerStarted","Data":"30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770"} Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.719748 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.733365 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.743003 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.748955 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.753650 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.753685 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.753699 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.753716 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.753727 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.767438 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:
39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.782126 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.795506 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.806400 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.817856 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.828929 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.839589 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.853139 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.856453 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.856475 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.856485 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.856500 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.856511 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.871788 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":
\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.894877 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.908085 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.919371 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.933094 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.947659 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.958680 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.958713 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.958723 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.958738 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.958747 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:25Z","lastTransitionTime":"2026-01-31T05:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.962251 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.975937 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:25 crc kubenswrapper[4712]: I0131 05:39:25.994864 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.011115 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.023257 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.037263 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.052421 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e
4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.061030 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.061056 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.061081 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.061096 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 
31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.061104 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.066636 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.077689 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.091526 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.110328 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7
260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.164095 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.164128 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.164139 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.164153 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.164163 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.265942 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.265985 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.265996 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.266011 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.266022 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.368614 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.368651 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.368659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.368674 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.368683 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.471021 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.471061 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.471071 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.471086 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.471097 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.481574 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 15:11:31.006999788 +0000 UTC Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.503908 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.503929 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.503929 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:26 crc kubenswrapper[4712]: E0131 05:39:26.504030 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:26 crc kubenswrapper[4712]: E0131 05:39:26.504127 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:26 crc kubenswrapper[4712]: E0131 05:39:26.504226 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.575079 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.575125 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.575137 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.575155 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.575192 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.677213 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.677258 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.677269 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.677289 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.677302 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.710609 4712 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.711215 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.741082 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.761624 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.775771 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.779211 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.779233 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.779243 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.779256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.779266 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.793329 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.816145 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9
8100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.835408 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.850343 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.867492 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.882205 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.882263 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.882287 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.882316 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.882337 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.888318 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.907273 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.931675 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.946380 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.966234 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7
260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.981704 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.984495 4712 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.984537 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.984550 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.984567 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.984578 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:26Z","lastTransitionTime":"2026-01-31T05:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:26 crc kubenswrapper[4712]: I0131 05:39:26.996579 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.087659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.088332 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.088354 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.088383 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.088405 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.191784 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.191827 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.191837 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.191852 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.191863 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.298728 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.298760 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.298769 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.298783 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.298794 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.400436 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.400469 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.400477 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.400491 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.400499 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.482552 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 02:55:59.360504292 +0000 UTC Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.502042 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.502083 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.502094 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.502112 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.502122 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.570184 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.570214 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.570223 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.570235 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.570244 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: E0131 05:39:27.582961 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.587421 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.587456 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.587466 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.587480 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.587489 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: E0131 05:39:27.599987 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.603814 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.603852 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.603862 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.603876 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.603884 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: E0131 05:39:27.621690 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.625915 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.625953 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.625963 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.625978 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.625987 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: E0131 05:39:27.641256 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.644463 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.644494 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.644505 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.644533 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.644545 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: E0131 05:39:27.657926 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: E0131 05:39:27.658042 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.659631 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.659703 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.659720 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.659850 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.659882 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.714990 4712 generic.go:334] "Generic (PLEG): container finished" podID="36a43db9-04c0-48fb-8ee1-6e77c26672b3" containerID="30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770" exitCode=0 Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.715057 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerDied","Data":"30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.715136 4712 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.729946 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.751607 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.762386 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.762429 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.762439 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.762454 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.762464 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.767360 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.781792 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.797057 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.812821 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.827039 
4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.838281 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.869633 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.869674 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.869685 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.869702 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.869713 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.882244 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.910070 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\
":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.937006 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.954474 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.965303 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.974097 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.974262 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.974337 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.974442 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.974516 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:27Z","lastTransitionTime":"2026-01-31T05:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:27 crc kubenswrapper[4712]: I0131 05:39:27.979215 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:27Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.077036 4712 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.077066 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.077075 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.077087 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.077096 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.179481 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.179540 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.179558 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.179587 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.179609 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.282432 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.282471 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.282481 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.282497 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.282507 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.385667 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.385701 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.385710 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.385724 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.385732 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.483526 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 02:51:56.255484465 +0000 UTC Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.487142 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.487219 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.487231 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.487247 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.487259 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.503480 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.503543 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.503487 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:28 crc kubenswrapper[4712]: E0131 05:39:28.503624 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:28 crc kubenswrapper[4712]: E0131 05:39:28.503740 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:28 crc kubenswrapper[4712]: E0131 05:39:28.503828 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.589600 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.589637 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.589646 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.589659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.589668 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
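The run of kubelet_node_status.go:724 and setters.go:603 entries above keeps reporting a single state: the node is pinned NotReady by the kubelet's network-readiness gate, because no CNI configuration has appeared in /etc/kubernetes/cni/net.d/ yet (ovn-kubernetes writes it once ovnkube-controller comes up). At heart that gate is a directory scan; a minimal, self-contained Go sketch of the idea, assuming the confDir path from the log (networkReady is a hypothetical helper, not kubelet source):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// networkReady is a hypothetical stand-in for the gate seen above: the node
// keeps reporting NetworkReady=false until at least one CNI config file
// (.conf, .conflist, or .json) exists in the configured conf directory.
func networkReady(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := networkReady("/etc/kubernetes/cni/net.d/")
	if err != nil || !ok {
		fmt.Println("NetworkReady=false: no CNI configuration file. Has your network provider started?")
		return
	}
	fmt.Println("NetworkReady=true")
}

Once ovnkube-controller writes its config file, the same scan succeeds and the node's Ready condition flips back to True.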
Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.691995 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.692045 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.692058 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.692082 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.692096 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.721122 4712 generic.go:334] "Generic (PLEG): container finished" podID="36a43db9-04c0-48fb-8ee1-6e77c26672b3" containerID="a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7" exitCode=0
Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.721201 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerDied","Data":"a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7"}
Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.721329 4712 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.747683 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status:
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7
260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.769814 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.782447 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed547
6c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.794415 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Di
sabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.794889 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.794938 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.794950 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.794973 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.794984 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.807770 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.825121 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.842814 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.858671 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.876375 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.891166 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.896515 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.896718 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.896786 4712 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.896858 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.896934 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.904067 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.913093 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.924610 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.937862 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e
4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:28Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.999309 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.999352 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.999363 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.999382 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 
31 05:39:28 crc kubenswrapper[4712]: I0131 05:39:28.999393 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:28Z","lastTransitionTime":"2026-01-31T05:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.108094 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.108436 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.108445 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.108461 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.108470 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:29Z","lastTransitionTime":"2026-01-31T05:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.210624 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.210666 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.210679 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.210696 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.210710 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:29Z","lastTransitionTime":"2026-01-31T05:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.314083 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.314121 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.314133 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.314149 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.314162 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:29Z","lastTransitionTime":"2026-01-31T05:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.416239 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.416276 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.416289 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.416305 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.416316 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:29Z","lastTransitionTime":"2026-01-31T05:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.484223 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 22:45:45.707549035 +0000 UTC Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.518161 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.518206 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.518216 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.518228 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.518236 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:29Z","lastTransitionTime":"2026-01-31T05:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.619756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.619791 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.619800 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.619814 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.619823 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:29Z","lastTransitionTime":"2026-01-31T05:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.721975 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.722018 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.722029 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.722044 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.722056 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:29Z","lastTransitionTime":"2026-01-31T05:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.726494 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" event={"ID":"36a43db9-04c0-48fb-8ee1-6e77c26672b3","Type":"ContainerStarted","Data":"ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.741974 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.761372 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.778198 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.793261 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.809011 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.821842 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.823747 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.823864 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.823932 4712 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.823993 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.824048 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:29Z","lastTransitionTime":"2026-01-31T05:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.837206 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.847919 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.859204 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.871894 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e
4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.890344 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.905643 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.919576 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed547
6c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.926641 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.926666 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.926676 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.926691 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.926701 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:29Z","lastTransitionTime":"2026-01-31T05:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:29 crc kubenswrapper[4712]: I0131 05:39:29.931796 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.028818 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.028847 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.028855 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.028869 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.028881 4712 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.130869 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.130903 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.130913 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.130927 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.130937 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.232995 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.233072 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.233083 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.233098 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.233108 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.335237 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.335275 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.335283 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.335299 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.335308 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.437071 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.437117 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.437129 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.437147 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.437161 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.484848 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 11:28:59.698128036 +0000 UTC Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.503248 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.503283 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.503301 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:30 crc kubenswrapper[4712]: E0131 05:39:30.503380 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:30 crc kubenswrapper[4712]: E0131 05:39:30.503472 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:30 crc kubenswrapper[4712]: E0131 05:39:30.503548 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.503969 4712 scope.go:117] "RemoveContainer" containerID="ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.540203 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.540239 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.540249 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.540268 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.540279 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.642456 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.642500 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.642510 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.642526 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.642535 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.744773 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.744839 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.744851 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.744869 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.744882 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.847311 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.847343 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.847352 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.847366 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.847375 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.949845 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.949934 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.949946 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.949963 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:30 crc kubenswrapper[4712]: I0131 05:39:30.949974 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:30Z","lastTransitionTime":"2026-01-31T05:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.052575 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.052620 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.052630 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.052675 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.052688 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.155122 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.155163 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.155196 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.155216 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.155226 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.257204 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.257249 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.257264 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.257282 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.257292 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.270664 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm"] Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.271077 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.273806 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.273978 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.295077 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.311786 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.325526 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.337004 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.347644 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.359501 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.359532 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.359541 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.359554 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.359563 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.362240 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.377730 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.388940 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.403388 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.416192 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.436250 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\
"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.452805 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3711b16f-9595-405e-90ea-ecc5eda64737-env-overrides\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.452900 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/3711b16f-9595-405e-90ea-ecc5eda64737-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.452940 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3711b16f-9595-405e-90ea-ecc5eda64737-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.452996 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm844\" (UniqueName: \"kubernetes.io/projected/3711b16f-9595-405e-90ea-ecc5eda64737-kube-api-access-zm844\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.460865 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7
260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.462275 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.462316 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.462330 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.462346 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.462362 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.475930 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.485326 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 11:03:19.503507872 +0000 UTC Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.487981 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.502377 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.553740 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3711b16f-9595-405e-90ea-ecc5eda64737-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.553782 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3711b16f-9595-405e-90ea-ecc5eda64737-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.553810 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm844\" (UniqueName: \"kubernetes.io/projected/3711b16f-9595-405e-90ea-ecc5eda64737-kube-api-access-zm844\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.553839 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3711b16f-9595-405e-90ea-ecc5eda64737-env-overrides\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.554456 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3711b16f-9595-405e-90ea-ecc5eda64737-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.554463 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3711b16f-9595-405e-90ea-ecc5eda64737-env-overrides\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.558715 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3711b16f-9595-405e-90ea-ecc5eda64737-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.565021 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.565044 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.565053 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.565067 4712 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.565076 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.569975 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm844\" (UniqueName: \"kubernetes.io/projected/3711b16f-9595-405e-90ea-ecc5eda64737-kube-api-access-zm844\") pod \"ovnkube-control-plane-749d76644c-76qlm\" (UID: \"3711b16f-9595-405e-90ea-ecc5eda64737\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.582831 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" Jan 31 05:39:31 crc kubenswrapper[4712]: W0131 05:39:31.603791 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3711b16f_9595_405e_90ea_ecc5eda64737.slice/crio-c0d2dde57396dd9f6b6ca01aab8fbd9502e9f51a07ca1013a7408d6900bc8ab8 WatchSource:0}: Error finding container c0d2dde57396dd9f6b6ca01aab8fbd9502e9f51a07ca1013a7408d6900bc8ab8: Status 404 returned error can't find the container with id c0d2dde57396dd9f6b6ca01aab8fbd9502e9f51a07ca1013a7408d6900bc8ab8 Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.667238 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.667275 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.667285 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.667302 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.667313 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.733556 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" event={"ID":"3711b16f-9595-405e-90ea-ecc5eda64737","Type":"ContainerStarted","Data":"c0d2dde57396dd9f6b6ca01aab8fbd9502e9f51a07ca1013a7408d6900bc8ab8"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.735359 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/0.log" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.738309 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c" exitCode=1 Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.738354 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.739169 4712 scope.go:117] "RemoveContainer" containerID="0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.740193 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.742687 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.743319 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.757797 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.770910 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.770942 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.770952 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.770967 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.770976 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.786307 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:30Z\\\",\\\"message\\\":\\\"k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0131 05:39:30.083058 5950 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0131 05:39:30.083081 5950 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:30.083093 5950 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:30.083117 5950 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:30.083133 5950 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:30.083138 5950 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:30.083208 5950 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:30.083235 5950 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:30.083216 5950 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:30.083253 5950 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:30.083280 5950 factory.go:656] Stopping watch factory\\\\nI0131 05:39:30.083300 5950 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.798989 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.811548 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api
-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.826413 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.838977 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.850723 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.864292 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.872382 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.872421 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc 
kubenswrapper[4712]: I0131 05:39:31.872434 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.872450 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.872461 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.879190 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.895782 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.912839 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.929835 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.941107 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.954018 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.967883 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.974553 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.974591 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.974602 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.974619 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.974632 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:31Z","lastTransitionTime":"2026-01-31T05:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.981805 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:31 crc kubenswrapper[4712]: I0131 05:39:31.996726 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2026-01-31T05:39:31Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.010101 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.022284 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.035203 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.046483 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.057921 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.069823 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.077154 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.077205 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.077218 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.077238 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.077251 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:32Z","lastTransitionTime":"2026-01-31T05:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.083407 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.097376 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.108327 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.118469 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.136205 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7
260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:30Z\\\",\\\"message\\\":\\\"k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0131 05:39:30.083058 5950 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0131 05:39:30.083081 5950 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:30.083093 5950 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:30.083117 5950 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:30.083133 5950 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:30.083138 5950 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:30.083208 5950 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:30.083235 5950 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:30.083216 5950 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:30.083253 5950 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:30.083280 5950 factory.go:656] Stopping watch factory\\\\nI0131 05:39:30.083300 5950 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.150353 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.163115 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api
-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.180368 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.180421 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.180437 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.180457 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.180472 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:32Z","lastTransitionTime":"2026-01-31T05:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.260099 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.260373 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:39:48.260328597 +0000 UTC m=+54.354210448 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.282318 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.282359 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.282370 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.282389 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.282401 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:32Z","lastTransitionTime":"2026-01-31T05:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.361396 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.361448 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.361471 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.361501 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361625 4712 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361642 4712 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361682 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:48.361666383 +0000 UTC m=+54.455548234 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361644 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361760 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361771 4712 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361645 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361816 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361823 4712 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361731 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:48.361713164 +0000 UTC m=+54.455594995 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361861 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:48.361843668 +0000 UTC m=+54.455725509 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.361878 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:48.361872298 +0000 UTC m=+54.455754139 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.376841 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-5svzb"] Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.377266 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.377332 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.385303 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.385358 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.385372 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.385391 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.385409 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:32Z","lastTransitionTime":"2026-01-31T05:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.392535 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.402539 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.414829 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.425988 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.448002 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.462594 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.476464 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.485639 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 15:30:00.583924415 +0000 UTC Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.487195 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.487220 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.487228 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.487241 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.487251 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:32Z","lastTransitionTime":"2026-01-31T05:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.493734 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.503711 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.503752 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.503714 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.503841 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.503915 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.504025 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.506937 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.518555 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.531876 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.551566 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.564024 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.564079 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn875\" (UniqueName: \"kubernetes.io/projected/03cf41cd-8606-4e98-a290-023fbe7d0956-kube-api-access-pn875\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.570870 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.589561 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.589600 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.589609 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.589626 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.589637 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:32Z","lastTransitionTime":"2026-01-31T05:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.599232 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:30Z\\\",\\\"message\\\":\\\"k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0131 05:39:30.083058 5950 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0131 05:39:30.083081 5950 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:30.083093 5950 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:30.083117 5950 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:30.083133 5950 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:30.083138 5950 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:30.083208 5950 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:30.083235 5950 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:30.083216 5950 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:30.083253 5950 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 
05:39:30.083280 5950 factory.go:656] Stopping watch factory\\\\nI0131 05:39:30.083300 5950 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1a
d92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.612788 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.628448 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.664723 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.664791 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn875\" (UniqueName: \"kubernetes.io/projected/03cf41cd-8606-4e98-a290-023fbe7d0956-kube-api-access-pn875\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.665130 4712 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: E0131 05:39:32.665192 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs podName:03cf41cd-8606-4e98-a290-023fbe7d0956 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:33.165163088 +0000 UTC m=+39.259044929 (durationBeforeRetry 500ms). 
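
The metrics-certs mount fails because the secret object "openshift-multus"/"metrics-daemon-secret" is not yet registered in the kubelet's object cache, and the failed operation is rescheduled rather than retried hot: "No retries permitted until ... (durationBeforeRetry 500ms)". A sketch of the doubling backoff that produces those retry timestamps, assuming a 500ms initial delay and a cap on the order of two minutes (constants assumed for illustration, not taken from kubelet source):

    package kubeletsketch

    import "time"

    // nextMountRetry returns the earliest time another MountVolume attempt
    // is permitted: 500ms after the first failure, doubling per subsequent
    // failure, capped at roughly two minutes.
    func nextMountRetry(lastFailure time.Time, failures int) time.Time {
        const (
            initialDelay = 500 * time.Millisecond
            maxDelay     = 2 * time.Minute
        )
        delay := initialDelay << uint(failures)
        if delay > maxDelay || delay < initialDelay {
            delay = maxDelay // also guards against shift overflow
        }
        return lastFailure.Add(delay)
    }

With failures=0 this yields exactly the schedule recorded above: the failure at 05:39:32.665 is retried no earlier than 05:39:33.165, 500ms later.
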
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs") pod "network-metrics-daemon-5svzb" (UID: "03cf41cd-8606-4e98-a290-023fbe7d0956") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.681640 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn875\" (UniqueName: \"kubernetes.io/projected/03cf41cd-8606-4e98-a290-023fbe7d0956-kube-api-access-pn875\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.693504 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.693538 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.693547 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.693562 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.693572 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:32Z","lastTransitionTime":"2026-01-31T05:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.748779 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" event={"ID":"3711b16f-9595-405e-90ea-ecc5eda64737","Type":"ContainerStarted","Data":"02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.748827 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" event={"ID":"3711b16f-9595-405e-90ea-ecc5eda64737","Type":"ContainerStarted","Data":"a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.751397 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/0.log" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.754469 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.754595 4712 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.770840 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.787315 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.796315 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.796355 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.796364 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.796378 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.796389 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:32Z","lastTransitionTime":"2026-01-31T05:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.808401 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.821621 4712 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.836149 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.854747 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.890880 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.898922 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.898962 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.898971 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.898990 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.899000 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:32Z","lastTransitionTime":"2026-01-31T05:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.913545 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.935937 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.949601 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.966208 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714d
ed0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:32 crc kubenswrapper[4712]: I0131 05:39:32.989759 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:30Z\\\",\\\"message\\\":\\\"k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0131 05:39:30.083058 5950 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0131 05:39:30.083081 5950 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:30.083093 5950 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:30.083117 5950 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:30.083133 5950 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:30.083138 5950 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:30.083208 5950 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:30.083235 5950 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:30.083216 5950 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:30.083253 5950 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:30.083280 5950 factory.go:656] Stopping watch factory\\\\nI0131 05:39:30.083300 5950 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:32Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.001823 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.001871 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.001882 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.001900 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.001912 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.003314 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.018505 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.033622 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed547
6c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.047894 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Di
sabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.070875 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c
00f67005cf514c6668d70fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:30Z\\\",\\\"message\\\":\\\"k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0131 05:39:30.083058 5950 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0131 05:39:30.083081 5950 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:30.083093 5950 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:30.083117 5950 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:30.083133 5950 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:30.083138 5950 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:30.083208 5950 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:30.083235 5950 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:30.083216 5950 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:30.083253 5950 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:30.083280 5950 factory.go:656] Stopping watch factory\\\\nI0131 05:39:30.083300 5950 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.085838 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.100907 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.104865 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.104914 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.104924 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.104939 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.104949 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.115005 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.130687 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.145531 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.161948 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.170748 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " 
pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:33 crc kubenswrapper[4712]: E0131 05:39:33.170883 4712 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:33 crc kubenswrapper[4712]: E0131 05:39:33.170938 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs podName:03cf41cd-8606-4e98-a290-023fbe7d0956 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:34.170922362 +0000 UTC m=+40.264804203 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs") pod "network-metrics-daemon-5svzb" (UID: "03cf41cd-8606-4e98-a290-023fbe7d0956") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.178383 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.192018 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.207117 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.207163 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.207186 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.207200 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.207210 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.209891 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.225138 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.237214 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.248599 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.261298 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.275607 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.289972 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714d
ed0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.309857 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.309902 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.309913 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.309930 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.309939 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.412820 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.412855 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.412863 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.412878 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.412887 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.486318 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 00:19:35.77031394 +0000 UTC Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.515219 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.515264 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.515281 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.515298 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.515308 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.617853 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.617935 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.617956 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.617981 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.617997 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.720787 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.720859 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.720872 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.720893 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.720911 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.760580 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/1.log" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.761387 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/0.log" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.764808 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba" exitCode=1 Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.764879 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.764942 4712 scope.go:117] "RemoveContainer" containerID="0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.765961 4712 scope.go:117] "RemoveContainer" containerID="745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba" Jan 31 05:39:33 crc kubenswrapper[4712]: E0131 05:39:33.766199 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.782995 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.799289 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.817070 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.823209 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.823240 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc 
kubenswrapper[4712]: I0131 05:39:33.823254 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.823271 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.823281 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.835775 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.849825 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.864736 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.882040 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.899989 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.916074 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.926216 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.926275 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.926288 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.926312 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.926329 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:33Z","lastTransitionTime":"2026-01-31T05:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.933869 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.952872 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.977431 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:33 crc kubenswrapper[4712]: I0131 05:39:33.991038 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:33Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.007837 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.023796 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.028402 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.028446 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.028457 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.028475 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.028486 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.045105 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:30Z\\\",\\\"message\\\":\\\"k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0131 05:39:30.083058 5950 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0131 05:39:30.083081 5950 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:30.083093 5950 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:30.083117 5950 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:30.083133 5950 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:30.083138 5950 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:30.083208 5950 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:30.083235 5950 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:30.083216 5950 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:30.083253 5950 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:30.083280 5950 factory.go:656] Stopping watch factory\\\\nI0131 05:39:30.083300 5950 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:33Z\\\",\\\"message\\\":\\\"ft-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:8798, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0131 05:39:33.216205 6186 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-daemon per-node LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216267 6186 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-daemon template LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216294 6186 services_controller.go:454] Service openshift-machine-config-operator/machine-config-daemon for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0131 05:39:33.216344 6186 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.130783 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.130823 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.130836 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.130855 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.130867 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.180879 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:34 crc kubenswrapper[4712]: E0131 05:39:34.180987 4712 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:34 crc kubenswrapper[4712]: E0131 05:39:34.181037 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs podName:03cf41cd-8606-4e98-a290-023fbe7d0956 nodeName:}" failed. 
No retries permitted until 2026-01-31 05:39:36.181024555 +0000 UTC m=+42.274906396 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs") pod "network-metrics-daemon-5svzb" (UID: "03cf41cd-8606-4e98-a290-023fbe7d0956") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.232900 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.232949 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.232959 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.232976 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.232984 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.334801 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.334842 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.334853 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.334870 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.334882 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.437205 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.437245 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.437254 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.437298 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.437307 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.487227 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 04:39:38.248152715 +0000 UTC Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.503534 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.503570 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.503538 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:34 crc kubenswrapper[4712]: E0131 05:39:34.503678 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.503947 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:34 crc kubenswrapper[4712]: E0131 05:39:34.503996 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:34 crc kubenswrapper[4712]: E0131 05:39:34.504059 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:39:34 crc kubenswrapper[4712]: E0131 05:39:34.504125 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.517126 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1b
f8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.528717 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.540099 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.540149 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.540160 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.540207 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.540220 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.542494 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\
\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\
\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d7
42fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.557447 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.570447 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.589346 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.613476 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.629460 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.640882 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.642136 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.642256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.642332 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.642414 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.642482 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.655344 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.666682 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.680390 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.696896 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.708377 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.721690 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.738882 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c
00f67005cf514c6668d70fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cad318d6ce774a7c0928018be740433f00bcbf7260c386b07b6786135ce9a0c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:30Z\\\",\\\"message\\\":\\\"k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0131 05:39:30.083058 5950 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0131 05:39:30.083081 5950 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:30.083093 5950 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:30.083117 5950 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:30.083133 5950 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:30.083138 5950 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:30.083208 5950 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:30.083235 5950 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:30.083209 5950 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:30.083216 5950 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:30.083253 5950 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:30.083280 5950 factory.go:656] Stopping watch factory\\\\nI0131 05:39:30.083300 5950 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:33Z\\\",\\\"message\\\":\\\"ft-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:8798, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0131 05:39:33.216205 6186 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-daemon per-node LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216267 6186 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-daemon template LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216294 6186 services_controller.go:454] Service openshift-machine-config-operator/machine-config-daemon for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 
(per node) and 0 (template) load balancers\\\\nF0131 05:39:33.216344 6186 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContaine
rStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.744401 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.744527 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.744782 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.744866 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.744925 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.768970 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/1.log" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.846777 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.846810 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.846820 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.846858 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.846869 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.949258 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.949306 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.949318 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.949335 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:34 crc kubenswrapper[4712]: I0131 05:39:34.949344 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:34Z","lastTransitionTime":"2026-01-31T05:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.051408 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.051687 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.051766 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.051841 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.051912 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.154525 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.154582 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.154591 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.154606 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.154615 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.256722 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.256766 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.256777 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.256812 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.256832 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.359874 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.359940 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.359952 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.359969 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.359979 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.462850 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.462908 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.462920 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.462939 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.462951 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.487576 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 01:54:11.460522583 +0000 UTC Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.564663 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.564713 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.564722 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.564739 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.564751 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.667207 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.667244 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.667256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.667275 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.667287 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.769699 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.769728 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.769736 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.769750 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.769759 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.872060 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.872110 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.872120 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.872138 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.872149 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.975870 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.975955 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.975972 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.976001 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:35 crc kubenswrapper[4712]: I0131 05:39:35.976021 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:35Z","lastTransitionTime":"2026-01-31T05:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.079020 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.079098 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.079117 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.079152 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.079202 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:36Z","lastTransitionTime":"2026-01-31T05:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.182461 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.182893 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.182912 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.182941 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.182959 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:36Z","lastTransitionTime":"2026-01-31T05:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.204876 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:36 crc kubenswrapper[4712]: E0131 05:39:36.205131 4712 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:36 crc kubenswrapper[4712]: E0131 05:39:36.205283 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs podName:03cf41cd-8606-4e98-a290-023fbe7d0956 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:40.205253465 +0000 UTC m=+46.299135346 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs") pod "network-metrics-daemon-5svzb" (UID: "03cf41cd-8606-4e98-a290-023fbe7d0956") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.286505 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.286580 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.286598 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.286626 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.286644 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:36Z","lastTransitionTime":"2026-01-31T05:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.390250 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.390297 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.390310 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.390327 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.390339 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:36Z","lastTransitionTime":"2026-01-31T05:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
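The nestedpendingoperations entry shows the volume manager's exponential backoff: the secret "openshift-multus"/"metrics-daemon-secret" is not yet registered, so the failed MountVolume.SetUp is requeued and the wait doubles on each consecutive failure, here reaching durationBeforeRetry 4s. A stdlib-only sketch of that doubling pattern (the 500 ms base and the cap are illustrative assumptions, not values taken from this log):

// backoff.go - a stdlib-only sketch of the doubling retry visible above
// (0.5s, 1s, 2s, 4s, ...); base and cap are assumptions for illustration.
package main

import (
	"errors"
	"fmt"
	"time"
)

func retryWithBackoff(op func() error, base, maxDelay time.Duration) error {
	delay := base
	for attempt := 1; ; attempt++ {
		err := op()
		if err == nil {
			return nil
		}
		if delay > maxDelay {
			return fmt.Errorf("giving up after %d attempts: %w", attempt, err)
		}
		fmt.Printf("attempt %d failed; no retries permitted for %s\n", attempt, delay)
		time.Sleep(delay)
		delay *= 2 // durationBeforeRetry doubles on every consecutive failure
	}
}

func main() {
	mount := func() error {
		return errors.New(`object "openshift-multus"/"metrics-daemon-secret" not registered`)
	}
	_ = retryWithBackoff(mount, 500*time.Millisecond, 8*time.Second)
}
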
Has your network provider started?"} Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.488543 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 20:26:53.037696424 +0000 UTC Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.493659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.493710 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.493730 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.493752 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.493764 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:36Z","lastTransitionTime":"2026-01-31T05:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.503238 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:36 crc kubenswrapper[4712]: E0131 05:39:36.503361 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.503414 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.503499 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:36 crc kubenswrapper[4712]: E0131 05:39:36.503542 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.503538 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:36 crc kubenswrapper[4712]: E0131 05:39:36.503733 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:36 crc kubenswrapper[4712]: E0131 05:39:36.503861 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.596523 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.596560 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.596569 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.596582 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.596591 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:36Z","lastTransitionTime":"2026-01-31T05:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.698923 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.698975 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.698988 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.699008 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.699020 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:36Z","lastTransitionTime":"2026-01-31T05:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.802339 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.802386 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.802396 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.802413 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.802425 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:36Z","lastTransitionTime":"2026-01-31T05:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.904855 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.904895 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.904920 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.904940 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:36 crc kubenswrapper[4712]: I0131 05:39:36.904953 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:36Z","lastTransitionTime":"2026-01-31T05:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.007705 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.007736 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.007744 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.007758 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.007767 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.110637 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.110676 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.110684 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.110697 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.110707 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.212475 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.212543 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.212565 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.212594 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.212615 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.315745 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.315783 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.315792 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.315806 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.315817 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.417588 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.417659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.417670 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.417688 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.417702 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.489498 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 21:13:28.536762923 +0000 UTC Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.519957 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.520006 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.520019 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.520042 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.520055 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.626322 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.626368 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.626380 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.626401 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.626414 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.686256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.686337 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.686363 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.686394 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.686410 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: E0131 05:39:37.700050 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:37Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.704644 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.704701 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
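The status patch itself is well-formed; it is rejected because the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 presents a certificate that expired on 2025-08-24, months before the node's current time of 2026-01-31. A minimal sketch for inspecting the certificate that endpoint serves (InsecureSkipVerify is used only so the handshake proceeds far enough to read the expired certificate; it must not be used for real verification):

// webhookcert.go - a minimal sketch for inspecting the certificate served
// by the webhook endpoint named in the error above.
package main

import (
	"crypto/tls"
	"fmt"
)

func main() {
	// Skip verification solely to read the peer certificate; the whole
	// point is that normal verification fails with "certificate has expired".
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		panic(err)
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
			cert.Subject, cert.NotBefore, cert.NotAfter)
	}
}
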
event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.704713 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.704735 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.704749 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: E0131 05:39:37.724885 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:37Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.732848 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.732910 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.732923 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.732947 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.732960 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: E0131 05:39:37.750698 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:37Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.755831 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.755864 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.755878 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.755900 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.755954 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: E0131 05:39:37.768904 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:37Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.772565 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.772600 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.772610 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.772624 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.772636 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: E0131 05:39:37.785331 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:37Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:37 crc kubenswrapper[4712]: E0131 05:39:37.785526 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.786933 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.786967 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.786979 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.787000 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.787012 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.889451 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.889514 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.889528 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.889543 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.889554 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.992733 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.992779 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.992789 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.992810 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:37 crc kubenswrapper[4712]: I0131 05:39:37.992821 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:37Z","lastTransitionTime":"2026-01-31T05:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.095858 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.095935 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.095950 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.095978 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.095993 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:38Z","lastTransitionTime":"2026-01-31T05:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.198873 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.198904 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.198914 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.198927 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.198936 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:38Z","lastTransitionTime":"2026-01-31T05:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.301073 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.301126 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.301142 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.301169 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.301258 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:38Z","lastTransitionTime":"2026-01-31T05:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.403319 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.403361 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.403369 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.403385 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.403401 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:38Z","lastTransitionTime":"2026-01-31T05:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.489855 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 06:34:27.922937837 +0000 UTC Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.503284 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.503347 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:38 crc kubenswrapper[4712]: E0131 05:39:38.503396 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.503455 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:38 crc kubenswrapper[4712]: E0131 05:39:38.503491 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.503361 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:39:38 crc kubenswrapper[4712]: E0131 05:39:38.503599 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:39:38 crc kubenswrapper[4712]: E0131 05:39:38.503656 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.506141 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.506227 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.506243 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.506258 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.506326 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:38Z","lastTransitionTime":"2026-01-31T05:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.608611 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.608660 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.608672 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.608692 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.608705 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:38Z","lastTransitionTime":"2026-01-31T05:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.711112 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.711774 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.711792 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.711823 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.711835 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:38Z","lastTransitionTime":"2026-01-31T05:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.814249 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.814311 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.814327 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.814351 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.814364 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:38Z","lastTransitionTime":"2026-01-31T05:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.917293 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.917336 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.917348 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.917366 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:38 crc kubenswrapper[4712]: I0131 05:39:38.917378 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:38Z","lastTransitionTime":"2026-01-31T05:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.020064 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.020134 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.020154 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.020226 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.020251 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.122448 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.122486 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.122497 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.122511 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.122521 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.226160 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.226287 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.226311 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.226342 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.226366 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.329830 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.329891 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.329909 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.329933 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.329950 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.432146 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.432235 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.432253 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.432276 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.432294 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.490436 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 10:25:27.123321149 +0000 UTC
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.535698 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.535763 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.535786 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.535815 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.535858 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.638814 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.638864 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.638882 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.638904 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.638921 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.742441 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.742524 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.742544 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.742572 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.742590 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.845868 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.845955 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.845994 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.846015 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.846028 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.949733 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.949798 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.949821 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.949854 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:39 crc kubenswrapper[4712]: I0131 05:39:39.949874 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:39Z","lastTransitionTime":"2026-01-31T05:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.052466 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.052539 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.052556 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.052583 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.052600 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.155928 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.155987 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.155999 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.156021 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.156034 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.247913 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:39:40 crc kubenswrapper[4712]: E0131 05:39:40.248078 4712 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 31 05:39:40 crc kubenswrapper[4712]: E0131 05:39:40.248144 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs podName:03cf41cd-8606-4e98-a290-023fbe7d0956 nodeName:}" failed. No retries permitted until 2026-01-31 05:39:48.248126811 +0000 UTC m=+54.342008652 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs") pod "network-metrics-daemon-5svzb" (UID: "03cf41cd-8606-4e98-a290-023fbe7d0956") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.259015 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.259064 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.259073 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.259088 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.259098 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.272498 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.273297 4712 scope.go:117] "RemoveContainer" containerID="745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba"
Jan 31 05:39:40 crc kubenswrapper[4712]: E0131 05:39:40.273436 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.293556 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.309619 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.325601 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.342261 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.358530 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.361062 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.361138 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.361151 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.361187 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.361200 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.374896 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.387131 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.400895 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.413103 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.426999 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\
\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 
05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.439511 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.449728 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.462955 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.463493 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.463525 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.463533 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.463548 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.463560 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.482710 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:33Z\\\",\\\"message\\\":\\\"ft-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:8798, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0131 05:39:33.216205 6186 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-daemon per-node LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216267 6186 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-daemon template LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216294 6186 services_controller.go:454] Service openshift-machine-config-operator/machine-config-daemon for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0131 05:39:33.216344 6186 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.491226 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 07:05:20.248569276 +0000 UTC
Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.493811 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.503759 4712 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.503847 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.503806 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:40 crc kubenswrapper[4712]: E0131 05:39:40.503965 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.503803 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:40 crc kubenswrapper[4712]: E0131 05:39:40.504079 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:39:40 crc kubenswrapper[4712]: E0131 05:39:40.504212 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:40 crc kubenswrapper[4712]: E0131 05:39:40.504385 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.505483 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:40Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.566299 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.566355 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.566368 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.566385 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:40 crc 
kubenswrapper[4712]: I0131 05:39:40.566396 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.668911 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.668962 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.668971 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.668989 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.669001 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.772272 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.772382 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.772411 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.772452 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.772543 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.875955 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.876066 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.876085 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.876362 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.876404 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.978622 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.978663 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.978674 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.978691 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:40 crc kubenswrapper[4712]: I0131 05:39:40.978701 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:40Z","lastTransitionTime":"2026-01-31T05:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.080783 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.080828 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.080840 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.080856 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.080869 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:41Z","lastTransitionTime":"2026-01-31T05:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.182983 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.183020 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.183029 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.183042 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.183051 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:41Z","lastTransitionTime":"2026-01-31T05:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.285648 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.285690 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.285705 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.285727 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.285742 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:41Z","lastTransitionTime":"2026-01-31T05:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.387507 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.387550 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.387562 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.387579 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.387591 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:41Z","lastTransitionTime":"2026-01-31T05:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.490304 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.490352 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.490363 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.490382 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.490396 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:41Z","lastTransitionTime":"2026-01-31T05:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.491568 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 09:28:39.761418902 +0000 UTC
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.593019 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.593070 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.593083 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.593101 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.593113 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:41Z","lastTransitionTime":"2026-01-31T05:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.695695 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.695747 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.695763 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.695787 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.695803 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:41Z","lastTransitionTime":"2026-01-31T05:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.797315 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.797353 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.797362 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.797378 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.797391 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:41Z","lastTransitionTime":"2026-01-31T05:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.899704 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.899745 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.899756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.899774 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:41 crc kubenswrapper[4712]: I0131 05:39:41.899784 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:41Z","lastTransitionTime":"2026-01-31T05:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.002114 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.002152 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.002162 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.002202 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.002214 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.104647 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.104687 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.104702 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.104736 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.104751 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.207087 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.207136 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.207146 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.207161 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.207187 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.310112 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.310152 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.310160 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.310193 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.310206 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.413259 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.413324 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.413338 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.413356 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.413371 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.492270 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 15:33:22.995786613 +0000 UTC
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.503933 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.503967 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.503945 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.504069 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:39:42 crc kubenswrapper[4712]: E0131 05:39:42.504266 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:39:42 crc kubenswrapper[4712]: E0131 05:39:42.504404 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:39:42 crc kubenswrapper[4712]: E0131 05:39:42.504506 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:39:42 crc kubenswrapper[4712]: E0131 05:39:42.504640 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.515245 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.515288 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.515302 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.515318 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.515332 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.618391 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.618445 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.618457 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.618472 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.618481 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.722194 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.722243 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.722253 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.722269 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.722280 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.824527 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.824574 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.824589 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.824605 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.824616 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.927095 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.927180 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.927193 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.927212 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:42 crc kubenswrapper[4712]: I0131 05:39:42.927223 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:42Z","lastTransitionTime":"2026-01-31T05:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.029610 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.029691 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.029705 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.029729 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.029745 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.132307 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.132372 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.132383 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.132403 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.132415 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.234139 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.234197 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.234206 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.234221 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.234231 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.337823 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.337865 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.337878 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.337900 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.337911 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.440589 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.440627 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.440635 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.440648 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.440658 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.492649 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 00:11:40.607770341 +0000 UTC Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.542962 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.542999 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.543008 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.543022 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.543031 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.645613 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.645655 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.645665 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.645680 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.645690 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.714924 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.729428 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.742399 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.747919 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.747975 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.747987 4712 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.748024 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.748038 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.758190 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.772981 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.788120 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.805220 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.823020 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.837820 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.850490 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.850600 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.850613 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.850641 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.850656 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.854119 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.874529 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.891840 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.912017 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.928905 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.945435 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manag
er-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.953756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.953805 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.953814 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.953833 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.953845 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:43Z","lastTransitionTime":"2026-01-31T05:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.968674 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:33Z\\\",\\\"message\\\":\\\"ft-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:8798, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0131 05:39:33.216205 6186 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-daemon per-node LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216267 6186 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-daemon template LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216294 6186 services_controller.go:454] Service openshift-machine-config-operator/machine-config-daemon for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0131 05:39:33.216344 6186 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:43 crc kubenswrapper[4712]: I0131 05:39:43.982816 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:43Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.056470 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.056527 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.056537 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.056557 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.056569 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.159501 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.159579 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.159597 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.159627 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.159647 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.264897 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.264991 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.265009 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.265082 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.265103 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.367711 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.367755 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.367768 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.367786 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.367796 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.470627 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.470666 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.470675 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.470690 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.470700 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.493076 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 05:48:42.611356554 +0000 UTC Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.503506 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:44 crc kubenswrapper[4712]: E0131 05:39:44.503650 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.503677 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.503726 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:44 crc kubenswrapper[4712]: E0131 05:39:44.503805 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.503837 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:44 crc kubenswrapper[4712]: E0131 05:39:44.503886 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:44 crc kubenswrapper[4712]: E0131 05:39:44.503930 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.516366 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.537866 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:33Z\\\",\\\"message\\\":\\\"ft-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:8798, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0131 05:39:33.216205 6186 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-daemon per-node LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216267 6186 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-daemon template LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216294 6186 services_controller.go:454] Service openshift-machine-config-operator/machine-config-daemon for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0131 05:39:33.216344 6186 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.550553 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.565443 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.573381 4712 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.573416 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.573428 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.573448 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.573461 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.579567 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.595803 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.614689 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.628781 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.642381 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.657598 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\
\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.671363 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb9
9b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.676660 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.676708 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.676722 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.676742 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.676753 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.686830 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.700926 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.715251 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.731233 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.742523 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.779355 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.779392 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.779405 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.779423 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.779437 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.882410 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.882464 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.882479 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.882499 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.882513 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.985814 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.985877 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.985889 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.985912 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:44 crc kubenswrapper[4712]: I0131 05:39:44.985925 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:44Z","lastTransitionTime":"2026-01-31T05:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.087869 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.087916 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.087928 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.087947 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.087959 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:45Z","lastTransitionTime":"2026-01-31T05:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.190089 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.190155 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.190213 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.190229 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.190240 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:45Z","lastTransitionTime":"2026-01-31T05:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.292699 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.292762 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.292774 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.292813 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.292824 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:45Z","lastTransitionTime":"2026-01-31T05:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.395202 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.395285 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.395296 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.395315 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.395327 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:45Z","lastTransitionTime":"2026-01-31T05:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.494101 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 20:37:56.89822004 +0000 UTC
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.497944 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.497992 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.498006 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.498028 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.498041 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:45Z","lastTransitionTime":"2026-01-31T05:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.600207 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.600292 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.600307 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.600334 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.600351 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:45Z","lastTransitionTime":"2026-01-31T05:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.702863 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.702952 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.702962 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.702984 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.702997 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:45Z","lastTransitionTime":"2026-01-31T05:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.804966 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.805062 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.805080 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.805105 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.805119 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:45Z","lastTransitionTime":"2026-01-31T05:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.908041 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.908455 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.908663 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.908883 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:45 crc kubenswrapper[4712]: I0131 05:39:45.909101 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:45Z","lastTransitionTime":"2026-01-31T05:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.015133 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.015212 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.015228 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.015252 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.015269 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.118494 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.118551 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.118568 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.118592 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.118609 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.221122 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.221242 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.221277 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.221321 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.221557 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.323069 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.323121 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.323149 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.323207 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.323223 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.425116 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.425458 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.425537 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.425610 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.425687 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.494522 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 14:03:05.738979118 +0000 UTC
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.503845 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.503863 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.504039 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:39:46 crc kubenswrapper[4712]: E0131 05:39:46.503969 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:39:46 crc kubenswrapper[4712]: E0131 05:39:46.504129 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.504053 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:39:46 crc kubenswrapper[4712]: E0131 05:39:46.504373 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:39:46 crc kubenswrapper[4712]: E0131 05:39:46.504378 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.527539 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.527797 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.527879 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.527962 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.528047 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.630678 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.630720 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.630729 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.630744 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.630753 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.733284 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.733327 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.733337 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.733352 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.733361 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.836326 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.836409 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.836426 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.836464 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.836483 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.938852 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.938902 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.938914 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.938934 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:46 crc kubenswrapper[4712]: I0131 05:39:46.938947 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:46Z","lastTransitionTime":"2026-01-31T05:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.042085 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.042158 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.042218 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.042248 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.042268 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.144496 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.144544 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.144555 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.144576 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.144589 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.246784 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.246827 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.246836 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.246856 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.246868 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.349422 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.349461 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.349470 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.349489 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.349499 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.451694 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.451747 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.451756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.451775 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.451793 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.495297 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 19:38:19.186795029 +0000 UTC
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.553604 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.553652 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.553661 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.553678 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.553687 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.656243 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.656287 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.656299 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.656318 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.656334 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.759263 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.759307 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.759319 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.759336 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.759346 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.863095 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.863142 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.863157 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.863209 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.863236 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.927924 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.927967 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.927978 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.927998 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.928011 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: E0131 05:39:47.941856 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:47Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.945371 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.945397 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.945405 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.945419 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.945427 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: E0131 05:39:47.962824 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:47Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.972072 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.972129 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
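Every rejected status patch in this stretch fails the same way: the node's clock reads 2026-01-31 while the network-node-identity webhook's serving certificate expired on 2025-08-24T17:21:41Z, so each TLS handshake to https://127.0.0.1:9743 is refused before the patch is ever evaluated. A minimal standalone Go sketch of the validity check behind this class of error (this is not kubelet code, and the PEM path below is a placeholder):

// certcheck: reproduce the "certificate has expired or is not yet valid"
// condition by comparing the local clock against a certificate's NotAfter.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Placeholder path; the real webhook serving cert lives wherever the
	// network-node-identity pod mounts it.
	data, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	if now.After(cert.NotAfter) {
		// This is what the TLS layer surfaces as
		// "x509: certificate has expired or is not yet valid".
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	}
}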
event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.972144 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.972163 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.972209 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:47 crc kubenswrapper[4712]: E0131 05:39:47.987089 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:47Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.992135 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.992191 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
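The payload being rejected is a strategic merge patch the kubelet computes by diffing its cached Node object against the freshly built status; the $setElementOrder/conditions directive in it preserves the ordering of the merged conditions list. A rough sketch of producing such a patch with the apimachinery helpers (assuming the k8s.io/api and k8s.io/apimachinery modules are available; this is illustrative, not the kubelet's exact code path):

// patchsketch: build a two-way strategic merge patch for a Node status
// change, of the same shape as the failed payload above.
package main

import (
	"encoding/json"
	"fmt"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/strategicpatch"
)

func main() {
	oldNode := v1.Node{Status: v1.NodeStatus{Conditions: []v1.NodeCondition{
		{Type: v1.NodeReady, Status: v1.ConditionTrue},
	}}}
	newNode := oldNode.DeepCopy()
	newNode.Status.Conditions[0].Status = v1.ConditionFalse
	newNode.Status.Conditions[0].Reason = "KubeletNotReady"
	newNode.Status.Conditions[0].LastTransitionTime = metav1.Now()

	oldJSON, err := json.Marshal(oldNode)
	if err != nil {
		panic(err)
	}
	newJSON, err := json.Marshal(newNode)
	if err != nil {
		panic(err)
	}
	// For merge-keyed lists such as conditions, the generated patch can
	// carry the "$setElementOrder/conditions" directive seen in the log.
	patch, err := strategicpatch.CreateTwoWayMergePatch(oldJSON, newJSON, v1.Node{})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(patch))
}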
event="NodeHasNoDiskPressure" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.992200 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.992217 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:47 crc kubenswrapper[4712]: I0131 05:39:47.992228 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:47Z","lastTransitionTime":"2026-01-31T05:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.009047 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.013653 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.013680 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
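Independently of the webhook failure, the Ready condition itself is False because the container runtime reports NetworkReady=false: nothing has written a CNI config into /etc/kubernetes/cni/net.d/ yet. A simplified sketch of that readiness check (the real logic lives in the runtime's CNI handling; the directory scan and extension list here are assumptions):

// cnicheck: report network-not-ready when the CNI conf dir has no usable
// configuration files, mirroring the NetworkPluginNotReady message above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(confDir)
	if err != nil && !os.IsNotExist(err) {
		panic(err)
	}
	var found []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file in", confDir, "- network not ready")
		return
	}
	fmt.Println("CNI configs:", found)
}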
event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.013691 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.013708 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.013719 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.030821 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.031042 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.032482 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
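The "update node status exceeds retry count" line closes the loop: the kubelet makes a fixed number of consecutive patch attempts (five in this log, matching kubelet_node_status.go:585 firing five times before :572) and then gives up until the next sync period. A minimal sketch of that bounded retry, with the patch call stubbed out; the constant name mirrors the kubelet's nodeStatusUpdateRetry, but the rest is illustrative:

// retrysketch: bounded node-status retry loop of the shape visible above.
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // consecutive attempts before giving up

func tryUpdateNodeStatus() error {
	// Stand-in for the PATCH that the expired webhook cert rejects.
	return errors.New("failed calling webhook: x509: certificate has expired")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(); err != nil {
			fmt.Println("Error updating node status, will retry:", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}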
event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.032518 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.032530 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.032543 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.032554 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.134977 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.135016 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.135026 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.135039 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.135051 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.237547 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.237588 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.237596 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.237611 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.237620 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.333507 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.333668 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:40:20.333641179 +0000 UTC m=+86.427523020 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.333805 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.333921 4712 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.333960 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs podName:03cf41cd-8606-4e98-a290-023fbe7d0956 nodeName:}" failed. No retries permitted until 2026-01-31 05:40:04.333951947 +0000 UTC m=+70.427833788 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs") pod "network-metrics-daemon-5svzb" (UID: "03cf41cd-8606-4e98-a290-023fbe7d0956") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.340009 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.340052 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.340061 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.340078 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.340089 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.433850 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.434279 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.434312 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.434372 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.434427 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434433 4712 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434461 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434475 4712 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434517 4712 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434549 4712 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434562 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434526 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-31 05:40:20.434511253 +0000 UTC m=+86.528393094 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434584 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434604 4712 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434621 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:40:20.434593575 +0000 UTC m=+86.528475516 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434649 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-31 05:40:20.434634286 +0000 UTC m=+86.528516197 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.434674 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:40:20.434662327 +0000 UTC m=+86.528544288 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.442104 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.442152 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.442169 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.442212 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.442227 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.445418 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.447744 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.472078 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c
00f67005cf514c6668d70fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:33Z\\\",\\\"message\\\":\\\"ft-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:8798, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0131 05:39:33.216205 6186 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-daemon per-node LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216267 6186 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-daemon template LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216294 6186 services_controller.go:454] Service openshift-machine-config-operator/machine-config-daemon for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0131 05:39:33.216344 6186 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.483542 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.494510 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.495476 4712 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 20:53:18.173832636 +0000 UTC Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.503336 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.503374 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.503380 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.503444 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.503557 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.503538 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.503619 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:48 crc kubenswrapper[4712]: E0131 05:39:48.503731 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.506237 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.521404 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.543959 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.545028 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.545077 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.545089 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.545111 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.545123 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.557279 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.572605 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.587217 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.596989 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.610002 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.621257 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.634707 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.647295 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.647326 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.647335 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.647349 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.647358 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.648205 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.666452 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:48Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.749140 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.749449 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.749530 4712 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.749601 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.749657 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.852276 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.852307 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.852315 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.852327 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.852336 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.954898 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.954940 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.954951 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.954967 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:48 crc kubenswrapper[4712]: I0131 05:39:48.954977 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:48Z","lastTransitionTime":"2026-01-31T05:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.056937 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.056997 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.057019 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.057041 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.057056 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.159709 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.159762 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.159775 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.159795 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.159809 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.262498 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.262529 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.262539 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.262554 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.262563 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.364304 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.364937 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.364993 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.365027 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.365051 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.469274 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.469330 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.469346 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.469366 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.469379 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.495592 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 02:03:19.426626959 +0000 UTC Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.572050 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.572096 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.572107 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.572126 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.572137 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.675429 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.675501 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.675515 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.675542 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.675568 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.778605 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.778638 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.778646 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.778661 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.778669 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.881135 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.881188 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.881201 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.881218 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.881229 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.983586 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.983631 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.983641 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.983684 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:49 crc kubenswrapper[4712]: I0131 05:39:49.983693 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:49Z","lastTransitionTime":"2026-01-31T05:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.085947 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.085984 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.085992 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.086008 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.086020 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:50Z","lastTransitionTime":"2026-01-31T05:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.188519 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.188557 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.188565 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.188580 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.188591 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:50Z","lastTransitionTime":"2026-01-31T05:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.291756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.291829 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.291848 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.291875 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.291898 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:50Z","lastTransitionTime":"2026-01-31T05:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.395352 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.395441 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.395489 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.395516 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.395530 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:50Z","lastTransitionTime":"2026-01-31T05:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.496411 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 01:42:57.14959329 +0000 UTC Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.498493 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.498531 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.498542 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.498557 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.498567 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:50Z","lastTransitionTime":"2026-01-31T05:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.503988 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.504055 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.504101 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.504163 4712 util.go:30] "No sandbox for pod can be found. 
Jan 31 05:39:50 crc kubenswrapper[4712]: E0131 05:39:50.504332 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:39:50 crc kubenswrapper[4712]: E0131 05:39:50.504487 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:39:50 crc kubenswrapper[4712]: E0131 05:39:50.504690 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:39:50 crc kubenswrapper[4712]: E0131 05:39:50.504834 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Has your network provider started?"} Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.703597 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.703658 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.703670 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.703694 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.703707 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:50Z","lastTransitionTime":"2026-01-31T05:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.807022 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.807071 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.807081 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.807103 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.807116 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:50Z","lastTransitionTime":"2026-01-31T05:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.910610 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.910659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.910669 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.910686 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:50 crc kubenswrapper[4712]: I0131 05:39:50.910698 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:50Z","lastTransitionTime":"2026-01-31T05:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.014772 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.014849 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.014864 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.014887 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.014900 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.117895 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.117986 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.118012 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.118050 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.118076 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.221564 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.221640 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.221660 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.221694 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.221716 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.324376 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.324442 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.324461 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.324490 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.324508 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.427793 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.427862 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.427879 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.427912 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.427930 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.496895 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 11:37:34.839664554 +0000 UTC Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.531336 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.531401 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.531422 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.531449 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.531466 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.634408 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.634500 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.634515 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.634542 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.634557 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.737286 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.737364 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.737384 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.737411 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.737430 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.841015 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.841079 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.841094 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.841113 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.841125 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.944756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.944801 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.944811 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.944827 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:51 crc kubenswrapper[4712]: I0131 05:39:51.944837 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:51Z","lastTransitionTime":"2026-01-31T05:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.047688 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.047732 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.047740 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.047756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.047766 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:52Z","lastTransitionTime":"2026-01-31T05:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.150530 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.150603 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.150636 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.150663 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.150680 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:52Z","lastTransitionTime":"2026-01-31T05:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.253731 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.253791 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.253807 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.253827 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.253842 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:52Z","lastTransitionTime":"2026-01-31T05:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.356106 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.356187 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.356202 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.356220 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.356235 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:52Z","lastTransitionTime":"2026-01-31T05:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.458323 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.458381 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.458394 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.458441 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.458452 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:52Z","lastTransitionTime":"2026-01-31T05:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.498012 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 22:02:35.415148842 +0000 UTC Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.503540 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.503592 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.503639 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.503540 4712 util.go:30] "No sandbox for pod can be found. 
Jan 31 05:39:52 crc kubenswrapper[4712]: E0131 05:39:52.503745 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:39:52 crc kubenswrapper[4712]: E0131 05:39:52.503909 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:39:52 crc kubenswrapper[4712]: E0131 05:39:52.504014 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:39:52 crc kubenswrapper[4712]: E0131 05:39:52.504116 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.664307 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.664622 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.664652 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.664669 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.664679 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:52Z","lastTransitionTime":"2026-01-31T05:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.767955 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.768335 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.768424 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.768504 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.768586 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:52Z","lastTransitionTime":"2026-01-31T05:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.870659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.870706 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.870717 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.870736 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.870749 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:52Z","lastTransitionTime":"2026-01-31T05:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.973293 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.973376 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.973390 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.973418 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:52 crc kubenswrapper[4712]: I0131 05:39:52.973433 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:52Z","lastTransitionTime":"2026-01-31T05:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.076002 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.076032 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.076040 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.076053 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.076063 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:53Z","lastTransitionTime":"2026-01-31T05:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.179818 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.179861 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.179870 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.179885 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.179895 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:53Z","lastTransitionTime":"2026-01-31T05:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.282796 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.282850 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.282858 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.282874 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.282886 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:53Z","lastTransitionTime":"2026-01-31T05:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.386296 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.386344 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.386355 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.386371 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.386380 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:53Z","lastTransitionTime":"2026-01-31T05:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.488997 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.489030 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.489040 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.489058 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.489069 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:53Z","lastTransitionTime":"2026-01-31T05:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.499185 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 04:45:38.605180362 +0000 UTC Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.591820 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.591862 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.591873 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.591889 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.591900 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:53Z","lastTransitionTime":"2026-01-31T05:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.694241 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.694278 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.694287 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.694301 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.694310 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:53Z","lastTransitionTime":"2026-01-31T05:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.796040 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.796076 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.796087 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.796111 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.796125 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:53Z","lastTransitionTime":"2026-01-31T05:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.898390 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.898428 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.898436 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.898450 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:53 crc kubenswrapper[4712]: I0131 05:39:53.898461 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:53Z","lastTransitionTime":"2026-01-31T05:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.000798 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.000840 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.000850 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.000868 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.000879 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.102935 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.102983 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.102994 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.103011 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.103022 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.205158 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.205828 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.205874 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.205894 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.205906 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.308729 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.308794 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.308819 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.308846 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.308869 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.411237 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.411349 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.411368 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.411380 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.411389 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.499890 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 17:14:02.730203742 +0000 UTC Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.503860 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.503905 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.503965 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:54 crc kubenswrapper[4712]: E0131 05:39:54.504052 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.504251 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:54 crc kubenswrapper[4712]: E0131 05:39:54.505023 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:54 crc kubenswrapper[4712]: E0131 05:39:54.505113 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:54 crc kubenswrapper[4712]: E0131 05:39:54.505286 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.505555 4712 scope.go:117] "RemoveContainer" containerID="745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.515164 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.515220 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.515232 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.515252 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.515267 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.535499 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.550865 4712 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.566601 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.583140 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.599576 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.612879 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.616882 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.616925 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.616961 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.616981 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.616994 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.624860 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.640232 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.652464 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.667638 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.680457 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.692043 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.707831 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.720161 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.720419 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.720429 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.720442 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.720450 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.722539 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.746421 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c
00f67005cf514c6668d70fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:33Z\\\",\\\"message\\\":\\\"ft-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:8798, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0131 05:39:33.216205 6186 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-daemon per-node LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216267 6186 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-daemon template LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216294 6186 services_controller.go:454] Service openshift-machine-config-operator/machine-config-daemon for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0131 05:39:33.216344 6186 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.760226 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.771205 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.822290 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.822333 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.822363 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.822383 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.822393 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.843001 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/1.log" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.845443 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.845885 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.856313 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.869574 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.883977 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.903948 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.918943 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.924403 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.924466 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.924476 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.924492 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.924519 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:54Z","lastTransitionTime":"2026-01-31T05:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.932853 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.945454 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.957846 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.970068 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.981739 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:54 crc kubenswrapper[4712]: I0131 05:39:54.996541 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:54Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.008831 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.022818 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.026998 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.027044 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.027056 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.027074 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.027086 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.040714 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:33Z\\\",\\\"message\\\":\\\"ft-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:8798, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0131 05:39:33.216205 6186 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-daemon per-node LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216267 6186 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-daemon template LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216294 6186 services_controller.go:454] Service openshift-machine-config-operator/machine-config-daemon for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0131 05:39:33.216344 6186 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to 
start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.053986 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.067661 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.080798 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.130228 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.130282 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.130293 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.130310 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 
05:39:55.130320 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.232205 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.232449 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.232535 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.232630 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.232732 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.335198 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.335236 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.335245 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.335258 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.335266 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.437000 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.437038 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.437047 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.437086 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.437095 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.500733 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 15:37:25.60554689 +0000 UTC
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.540240 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.540280 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.540290 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.540308 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.540319 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.643063 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.643105 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.643118 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.643133 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.643142 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.745882 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.745933 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.745942 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.745988 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.745998 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.848458 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.848509 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.848519 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.848535 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.848545 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.850148 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/2.log"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.850742 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/1.log"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.853131 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a" exitCode=1
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.853184 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a"}
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.853226 4712 scope.go:117] "RemoveContainer" containerID="745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.853842 4712 scope.go:117] "RemoveContainer" containerID="14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a"
Jan 31 05:39:55 crc kubenswrapper[4712]: E0131 05:39:55.854003 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.871011 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.890310 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.904060 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.920648 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.932550 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 
31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.947197 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.950983 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.951039 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.951051 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.951068 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.951104 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:55Z","lastTransitionTime":"2026-01-31T05:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.961539 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 
05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.977610 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:55 crc kubenswrapper[4712]: I0131 05:39:55.992333 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:55Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.007628 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.026869 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.039236 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.053799 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z"
Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.054028 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.054113 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.054131 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.054161 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:39:56 crc kubenswrapper[4712]: I0131
05:39:56.054208 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.073311 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ov
nkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://745ac28659346fbae411102757343433b3e2363c00f67005cf514c6668d70fba\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:33Z\\\",\\\"message\\\":\\\"ft-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.43\\\\\\\", Port:8798, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0131 05:39:33.216205 6186 services_controller.go:452] Built service openshift-machine-config-operator/machine-config-daemon per-node LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216267 6186 services_controller.go:453] Built service openshift-machine-config-operator/machine-config-daemon template LB for network=default: []services.LB{}\\\\nI0131 05:39:33.216294 6186 services_controller.go:454] Service openshift-machine-config-operator/machine-config-daemon for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0131 05:39:33.216344 6186 ovnkube.go:137] 
failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:55Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:55.632574 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:55.632580 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:55.632599 6453 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:55.632607 6453 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:55.632608 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:55.632631 6453 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:55.632695 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:55.632715 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:55.632723 6453 factory.go:656] Stopping watch factory\\\\nI0131 05:39:55.632735 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0131 05:39:55.632744 6453 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:55.632751 6453 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:55.632831 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:39:55.632900 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:39:55.633068 6453 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.085453 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.098931 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.111155 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.157998 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.158064 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.158079 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.158105 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.158121 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.261783 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.261857 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.261875 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.261904 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.261921 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.364401 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.364467 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.364480 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.364505 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.364525 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.467935 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.468010 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.468025 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.468048 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.468069 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.501878 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 20:14:48.437958454 +0000 UTC Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.503329 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.503399 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.503399 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.503578 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:56 crc kubenswrapper[4712]: E0131 05:39:56.503586 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:39:56 crc kubenswrapper[4712]: E0131 05:39:56.503713 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:56 crc kubenswrapper[4712]: E0131 05:39:56.504113 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:56 crc kubenswrapper[4712]: E0131 05:39:56.504264 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.569957 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.570012 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.570031 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.570053 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.570068 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.672637 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.672672 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.672679 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.672693 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.672704 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.774336 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.774385 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.774394 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.774410 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.774418 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.859056 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/2.log" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.862693 4712 scope.go:117] "RemoveContainer" containerID="14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a" Jan 31 05:39:56 crc kubenswrapper[4712]: E0131 05:39:56.862912 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.876804 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.876866 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.876879 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.876959 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.876976 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.877916 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.893517 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.907869 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.920573 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.932687 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.944220 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.955806 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.966287 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.979390 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.979425 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.979435 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.979449 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.979460 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:56Z","lastTransitionTime":"2026-01-31T05:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.979461 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:56 crc kubenswrapper[4712]: I0131 05:39:56.991805 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:56Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.006913 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714d
ed0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:57Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.019333 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:57Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.041122 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8
bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:55Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:55.632574 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:55.632580 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:55.632599 6453 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:55.632607 6453 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:55.632608 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:55.632631 6453 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:55.632695 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:55.632715 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:55.632723 6453 factory.go:656] Stopping watch factory\\\\nI0131 05:39:55.632735 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0131 05:39:55.632744 6453 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:55.632751 6453 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:55.632831 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:39:55.632900 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:39:55.633068 6453 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:57Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.053707 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:57Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.065373 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:57Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.079759 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed547
6c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:57Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.080949 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.080986 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.080994 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.081007 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.081017 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:57Z","lastTransitionTime":"2026-01-31T05:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.090154 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:57Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.183375 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.183426 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.183439 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.183456 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.183472 4712 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:57Z","lastTransitionTime":"2026-01-31T05:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.285963 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.286010 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.286021 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.286036 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.286049 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:57Z","lastTransitionTime":"2026-01-31T05:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.388886 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.388946 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.388959 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.388977 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.388988 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:57Z","lastTransitionTime":"2026-01-31T05:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.490693 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.490732 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.490741 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.490755 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.490764 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:57Z","lastTransitionTime":"2026-01-31T05:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.503193 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 01:17:36.726402117 +0000 UTC Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.593318 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.593347 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.593354 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.593366 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.593374 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:57Z","lastTransitionTime":"2026-01-31T05:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.695893 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.695942 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.695959 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.695981 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.696032 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:57Z","lastTransitionTime":"2026-01-31T05:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.798786 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.798827 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.798840 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.798857 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.798868 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:57Z","lastTransitionTime":"2026-01-31T05:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.900939 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.900969 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.900979 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.901032 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:57 crc kubenswrapper[4712]: I0131 05:39:57.901042 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:57Z","lastTransitionTime":"2026-01-31T05:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.002937 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.002972 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.002982 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.002994 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.003002 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.099418 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.099451 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.099459 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.099471 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.099479 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.113193 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:58Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.116293 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.116322 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.116331 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.116344 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.116354 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.129523 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:58Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.132694 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.132729 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.132740 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.132754 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.132762 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.151190 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:58Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.155466 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.155513 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.155525 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.155543 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.155556 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.172242 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:58Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.177009 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.177059 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.177074 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.177092 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.177103 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.192223 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:39:58Z is after 2025-08-24T17:21:41Z" Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.192387 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.194225 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.194266 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.194277 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.194295 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.194306 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.298256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.298300 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.298310 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.298327 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.298337 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.400760 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.400806 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.400817 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.400831 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.400840 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.503389 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 10:18:16.798380661 +0000 UTC Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.503734 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.503773 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.503824 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.503879 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.503853 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.503954 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.504079 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.504098 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.504110 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.504124 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.504145 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.504157 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: E0131 05:39:58.504047 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.605890 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.605930 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.605940 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.605954 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.605964 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.708874 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.708927 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.708940 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.708957 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.708970 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.811486 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.811528 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.811537 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.811550 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.811562 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.914399 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.914459 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.914471 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.914484 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:58 crc kubenswrapper[4712]: I0131 05:39:58.914493 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:58Z","lastTransitionTime":"2026-01-31T05:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.016725 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.016768 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.016779 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.016800 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.016815 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.119165 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.119225 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.119235 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.119247 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.119256 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.221093 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.221143 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.221155 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.221197 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.221210 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.323371 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.323407 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.323416 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.323429 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.323439 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.426079 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.426135 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.426147 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.426188 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.426200 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.503632 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 03:25:56.509302486 +0000 UTC Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.528483 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.528542 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.528557 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.528576 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.528589 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.630844 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.630887 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.630899 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.630918 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.630934 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.734233 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.734285 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.734300 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.734324 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.734339 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.836986 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.837024 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.837033 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.837048 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.837060 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.939762 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.939833 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.939846 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.939873 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:39:59 crc kubenswrapper[4712]: I0131 05:39:59.939889 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:39:59Z","lastTransitionTime":"2026-01-31T05:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.043070 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.043268 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.043283 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.043304 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.043317 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:00Z","lastTransitionTime":"2026-01-31T05:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.146358 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.146396 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.146404 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.146417 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.146426 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:00Z","lastTransitionTime":"2026-01-31T05:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.248458 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.248509 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.248522 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.248540 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.248552 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:00Z","lastTransitionTime":"2026-01-31T05:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.350478 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.350517 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.350531 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.350546 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.350555 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:00Z","lastTransitionTime":"2026-01-31T05:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.452556 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.452597 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.452609 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.452626 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.452637 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:00Z","lastTransitionTime":"2026-01-31T05:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.503727 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.503775 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.503785 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 04:20:37.672514225 +0000 UTC Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.503729 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.503846 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:00 crc kubenswrapper[4712]: E0131 05:40:00.503950 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:00 crc kubenswrapper[4712]: E0131 05:40:00.504145 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:00 crc kubenswrapper[4712]: E0131 05:40:00.504367 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:00 crc kubenswrapper[4712]: E0131 05:40:00.504783 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.554373 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.554424 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.554434 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.554450 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:00 crc kubenswrapper[4712]: I0131 05:40:00.554460 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:00Z","lastTransitionTime":"2026-01-31T05:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 31 05:40:01 crc kubenswrapper[4712]: I0131 05:40:01.504557 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 15:19:33.783734573 +0000 UTC
Jan 31 05:40:02 crc kubenswrapper[4712]: I0131 05:40:02.503322 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:40:02 crc kubenswrapper[4712]: I0131 05:40:02.503349 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:40:02 crc kubenswrapper[4712]: I0131 05:40:02.503421 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:40:02 crc kubenswrapper[4712]: E0131 05:40:02.503504 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:40:02 crc kubenswrapper[4712]: I0131 05:40:02.503553 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:40:02 crc kubenswrapper[4712]: E0131 05:40:02.503852 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:40:02 crc kubenswrapper[4712]: E0131 05:40:02.503908 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:40:02 crc kubenswrapper[4712]: E0131 05:40:02.503985 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:40:02 crc kubenswrapper[4712]: I0131 05:40:02.504682 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 10:36:20.256195078 +0000 UTC
Jan 31 05:40:03 crc kubenswrapper[4712]: I0131 05:40:03.505759 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 23:09:36.844510143 +0000 UTC
Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.430641 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:40:04 crc kubenswrapper[4712]: E0131 05:40:04.430864 4712 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 31 05:40:04 crc kubenswrapper[4712]: E0131 05:40:04.430987 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs podName:03cf41cd-8606-4e98-a290-023fbe7d0956 nodeName:}" failed. No retries permitted until 2026-01-31 05:40:36.430957333 +0000 UTC m=+102.524839364 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs") pod "network-metrics-daemon-5svzb" (UID: "03cf41cd-8606-4e98-a290-023fbe7d0956") : object "openshift-multus"/"metrics-daemon-secret" not registered
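The mount failure above is parked rather than fatal: the secret object has not been registered with the kubelet yet, so the volume operation is retried with exponential backoff, and the logged durationBeforeRetry of 32s is consistent with a doubling sequence from a sub-second base (0.5s, 1s, 2s, ..., capped). A sketch of such a policy; the base and cap values are assumptions, not the kubelet's exact constants:

```go
package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the previous delay up to a fixed cap. Starting from
// 500ms the sequence runs 0.5s, 1s, 2s, 4s, 8s, 16s, 32s, ... — under
// these assumed constants, a 32s durationBeforeRetry would correspond to
// the seventh consecutive failure of the metrics-certs mount.
func nextBackoff(prev time.Duration) time.Duration {
	const (
		base     = 500 * time.Millisecond
		maxDelay = 2*time.Minute + 2*time.Second
	)
	if prev < base {
		return base
	}
	if next := prev * 2; next < maxDelay {
		return next
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for i := 0; i < 9; i++ {
		d = nextBackoff(d)
		fmt.Println(d) // 500ms 1s 2s 4s 8s 16s 32s 1m4s 2m2s
	}
}
```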
Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.504006 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.504077 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:40:04 crc kubenswrapper[4712]: E0131 05:40:04.504136 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.504034 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.504273 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:40:04 crc kubenswrapper[4712]: E0131 05:40:04.504524 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:04 crc kubenswrapper[4712]: E0131 05:40:04.504356 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:04 crc kubenswrapper[4712]: E0131 05:40:04.504264 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.506589 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 23:32:15.558681819 +0000 UTC Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.520158 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.533278 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"ho
stIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.549889 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.555686 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.555937 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.556020 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.556092 4712 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.556148 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:04Z","lastTransitionTime":"2026-01-31T05:40:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.564545 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.582486 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.599615 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.615742 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.632278 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.648780 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.658369 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.658403 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.658416 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.658435 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.658449 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:04Z","lastTransitionTime":"2026-01-31T05:40:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.664961 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.680781 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.694061 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.707566 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.722550 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.737469 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.760878 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:55Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:55.632574 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:55.632580 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:55.632599 6453 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:55.632607 6453 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:55.632608 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:55.632631 6453 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:55.632695 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:55.632715 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:55.632723 6453 factory.go:656] Stopping watch factory\\\\nI0131 05:39:55.632735 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0131 05:39:55.632744 6453 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:55.632751 6453 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:55.632831 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:39:55.632900 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:39:55.633068 6453 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.761497 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.761543 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.761553 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.761572 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.761585 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:04Z","lastTransitionTime":"2026-01-31T05:40:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.771119 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:04Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.863941 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.863992 4712 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.864002 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.864017 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.864030 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:04Z","lastTransitionTime":"2026-01-31T05:40:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.966963 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.967037 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.967054 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.967078 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:04 crc kubenswrapper[4712]: I0131 05:40:04.967093 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:04Z","lastTransitionTime":"2026-01-31T05:40:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.069829 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.069881 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.069892 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.069928 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.069941 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:05Z","lastTransitionTime":"2026-01-31T05:40:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.173241 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.173326 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.173346 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.173376 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.173398 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:05Z","lastTransitionTime":"2026-01-31T05:40:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.276679 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.276761 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.276769 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.276796 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.276817 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:05Z","lastTransitionTime":"2026-01-31T05:40:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.380364 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.380418 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.380430 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.380450 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.380462 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:05Z","lastTransitionTime":"2026-01-31T05:40:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.484184 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.484239 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.484256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.484276 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.484290 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:05Z","lastTransitionTime":"2026-01-31T05:40:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.506683 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 06:37:36.385110523 +0000 UTC Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.587663 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.587777 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.587798 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.587836 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.587856 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:05Z","lastTransitionTime":"2026-01-31T05:40:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.691483 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.691548 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.691561 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.691583 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.691598 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:05Z","lastTransitionTime":"2026-01-31T05:40:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.794158 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.794232 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.794245 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.794261 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.794272 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:05Z","lastTransitionTime":"2026-01-31T05:40:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.896793 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.896839 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.896849 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.896869 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:05 crc kubenswrapper[4712]: I0131 05:40:05.896881 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:05Z","lastTransitionTime":"2026-01-31T05:40:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.000017 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.000084 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.000104 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.000130 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.000145 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.103580 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.103625 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.103636 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.103653 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.103665 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.206239 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.206303 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.206314 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.206337 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.206350 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.309920 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.310011 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.310233 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.310261 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.310281 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.413705 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.413778 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.413794 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.413823 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.413843 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.503845 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.503901 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.503953 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.503845 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:06 crc kubenswrapper[4712]: E0131 05:40:06.504070 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:06 crc kubenswrapper[4712]: E0131 05:40:06.504205 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:06 crc kubenswrapper[4712]: E0131 05:40:06.504372 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:06 crc kubenswrapper[4712]: E0131 05:40:06.504522 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.507208 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 12:15:45.232002029 +0000 UTC Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.517237 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.517282 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.517295 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.517314 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.517329 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.621238 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.621303 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.621320 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.621348 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.621371 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.725432 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.725484 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.725497 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.725519 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.725536 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.828649 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.828717 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.828733 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.828762 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.828780 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.931390 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.931443 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.931456 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.931479 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:06 crc kubenswrapper[4712]: I0131 05:40:06.931492 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:06Z","lastTransitionTime":"2026-01-31T05:40:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.034670 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.034750 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.034777 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.034811 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.034833 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.137960 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.138435 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.138549 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.138658 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.138742 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.242315 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.242393 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.242411 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.242445 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.242468 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.345875 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.345932 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.345944 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.345963 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.345976 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.449519 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.449630 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.449669 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.449711 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.449737 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.507921 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 21:06:56.031081561 +0000 UTC Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.552353 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.552403 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.552415 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.552433 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.552445 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.655676 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.655728 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.655740 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.655759 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.655770 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.759038 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.759397 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.759471 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.759533 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.759611 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.862265 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.862476 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.862547 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.862616 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.862703 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.965089 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.965537 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.965549 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.965570 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:07 crc kubenswrapper[4712]: I0131 05:40:07.965581 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:07Z","lastTransitionTime":"2026-01-31T05:40:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.068303 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.068353 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.068367 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.068386 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.068399 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.170762 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.171112 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.171198 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.171303 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.171422 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.273651 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.273696 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.273709 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.273728 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.273741 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.377335 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.377397 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.377407 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.377425 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.377435 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.438276 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.438325 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.438335 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.438350 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.438361 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.455381 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.462559 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.462604 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.462618 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.462641 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.462656 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.476445 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.481806 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.481932 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
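Every status patch in this stretch fails with the same TLS error: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a certificate that expired 2025-08-24T17:21:41Z, while the node clock reads 2026-01-31. A minimal Go sketch of how one might confirm what that endpoint is actually serving, assuming shell access to the node while the webhook is up; the address is taken from the log line above, everything else is illustrative and not part of the capture:

// certcheck.go - dial the webhook endpoint kubelet cannot reach and print
// the validity window of the certificate it presents.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// InsecureSkipVerify: we want to inspect the certificate even though it
	// is expired, so skip verification instead of failing the handshake.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("handshake failed: %v", err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%t\n",
			cert.Subject,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			cert.NotAfter.Before(now))
	}
}

If the log is accurate, the leaf certificate would print notAfter=2025-08-24T17:21:41Z, i.e. the failure is the webhook's serving certificate, not kubelet's client credentials (the kubelet-serving certificate above is valid until 2026-02-24).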
event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.481999 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.482077 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.482152 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.495323 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.500005 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.500060 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.500072 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.500092 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.500103 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.503696 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.503750 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.503776 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.503801 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.503889 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.503933 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.504107 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.504333 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.508613 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 08:58:20.383191766 +0000 UTC Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.512060 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.514854 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.514884 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.514893 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.514934 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.514953 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.526332 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: E0131 05:40:08.526437 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.527760 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.527788 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.527798 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.527810 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.527820 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.630343 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.630421 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.630430 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.630473 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.630484 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.733282 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.733346 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.733364 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.733387 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.733405 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.836967 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.837035 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.837053 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.837082 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.837101 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.898051 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/0.log" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.898139 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4943935-d884-4777-b679-bfabc7235a23" containerID="0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25" exitCode=1 Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.898233 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zbfp7" event={"ID":"f4943935-d884-4777-b679-bfabc7235a23","Type":"ContainerDied","Data":"0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.898930 4712 scope.go:117] "RemoveContainer" containerID="0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.920221 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.934215 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.940419 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.940463 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.940475 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.940527 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.940543 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:08Z","lastTransitionTime":"2026-01-31T05:40:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.949020 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.963922 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.974799 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:08 crc kubenswrapper[4712]: I0131 05:40:08.987137 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:07Z\\\",\\\"message\\\":\\\"2026-01-31T05:39:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431\\\\n2026-01-31T05:39:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431 to /host/opt/cni/bin/\\\\n2026-01-31T05:39:22Z [verbose] multus-daemon started\\\\n2026-01-31T05:39:22Z [verbose] Readiness Indicator file check\\\\n2026-01-31T05:40:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.000092 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:08Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.014057 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.027747 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.044440 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.044481 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.044491 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.044508 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.044520 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.050039 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:55Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:55.632574 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:55.632580 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:55.632599 6453 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:55.632607 6453 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:55.632608 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:55.632631 6453 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:55.632695 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:55.632715 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:55.632723 6453 factory.go:656] Stopping watch factory\\\\nI0131 05:39:55.632735 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0131 05:39:55.632744 6453 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:55.632751 6453 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:55.632831 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:39:55.632900 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:39:55.633068 6453 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.063988 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.077719 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.088306 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.104205 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.120861 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.138099 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.146413 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.146479 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.146491 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.146510 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.146524 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.156370 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.249384 4712 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.249441 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.249453 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.249478 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.249494 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.353041 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.353090 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.353104 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.353126 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.353138 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.462227 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.462280 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.462289 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.462306 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.462322 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.504048 4712 scope.go:117] "RemoveContainer" containerID="14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a" Jan 31 05:40:09 crc kubenswrapper[4712]: E0131 05:40:09.504219 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.509524 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 09:49:39.573468456 +0000 UTC Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.565561 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.565590 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.565598 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.565613 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.565622 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.668125 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.668166 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.668188 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.668207 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.668219 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.771191 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.771229 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.771241 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.771260 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.771272 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.873415 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.873448 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.873455 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.873469 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.873476 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.902363 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/0.log" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.902407 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zbfp7" event={"ID":"f4943935-d884-4777-b679-bfabc7235a23","Type":"ContainerStarted","Data":"1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.919663 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.935750 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.950095 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.970729 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.976063 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.976090 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:09 crc 
kubenswrapper[4712]: I0131 05:40:09.976098 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.976110 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.976120 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:09Z","lastTransitionTime":"2026-01-31T05:40:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:09 crc kubenswrapper[4712]: I0131 05:40:09.987421 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.000824 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:09Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.016380 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.029745 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.041685 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.056008 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:07Z\\\",\\\"message\\\":\\\"2026-01-31T05:39:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431\\\\n2026-01-31T05:39:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431 to /host/opt/cni/bin/\\\\n2026-01-31T05:39:22Z [verbose] multus-daemon started\\\\n2026-01-31T05:39:22Z [verbose] Readiness Indicator file check\\\\n2026-01-31T05:40:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.069165 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.078199 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.078242 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.078251 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.078266 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.078290 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:10Z","lastTransitionTime":"2026-01-31T05:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.081943 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.092813 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.111747 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8
bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:55Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:55.632574 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:55.632580 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:55.632599 6453 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:55.632607 6453 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:55.632608 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:55.632631 6453 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:55.632695 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:55.632715 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:55.632723 6453 factory.go:656] Stopping watch factory\\\\nI0131 05:39:55.632735 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0131 05:39:55.632744 6453 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:55.632751 6453 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:55.632831 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:39:55.632900 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:39:55.633068 6453 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.124584 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.136079 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.146853 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:10Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.181339 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.181372 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.181384 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.181403 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.181414 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:10Z","lastTransitionTime":"2026-01-31T05:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.284906 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.284951 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.284961 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.284978 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.284993 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:10Z","lastTransitionTime":"2026-01-31T05:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.387923 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.387967 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.387980 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.387998 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.388011 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:10Z","lastTransitionTime":"2026-01-31T05:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.490474 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.490525 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.490539 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.490558 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.490571 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:10Z","lastTransitionTime":"2026-01-31T05:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.503954 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.504046 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.504051 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.504150 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:10 crc kubenswrapper[4712]: E0131 05:40:10.504154 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:10 crc kubenswrapper[4712]: E0131 05:40:10.504255 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:10 crc kubenswrapper[4712]: E0131 05:40:10.504364 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:10 crc kubenswrapper[4712]: E0131 05:40:10.504633 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.510218 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 20:19:14.28189442 +0000 UTC Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.592536 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.592583 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.592598 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.592618 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.592631 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:10Z","lastTransitionTime":"2026-01-31T05:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.695827 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.695888 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.695903 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.695927 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.695945 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:10Z","lastTransitionTime":"2026-01-31T05:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.798665 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.798732 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.798745 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.798765 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.798775 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:10Z","lastTransitionTime":"2026-01-31T05:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.902371 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.902442 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.902455 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.902477 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:10 crc kubenswrapper[4712]: I0131 05:40:10.902493 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:10Z","lastTransitionTime":"2026-01-31T05:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.004991 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.005038 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.005050 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.005068 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.005079 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.108580 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.108637 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.108647 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.108669 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.108682 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.211227 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.211289 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.211301 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.211321 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.211336 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.313681 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.313724 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.313737 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.313757 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.313769 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.416408 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.416447 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.416455 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.416469 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.416479 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.510562 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 12:39:27.685671005 +0000 UTC Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.518369 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.518407 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.518417 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.518433 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.518443 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.621032 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.621079 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.621091 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.621109 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.621121 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.723723 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.723790 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.723810 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.723831 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.723845 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.827525 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.827581 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.827601 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.827627 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.827641 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.930006 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.930092 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.930112 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.930142 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:11 crc kubenswrapper[4712]: I0131 05:40:11.930162 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:11Z","lastTransitionTime":"2026-01-31T05:40:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.032238 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.032291 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.032302 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.032320 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.032335 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.135583 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.135707 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.135726 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.135756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.135774 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.238888 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.238957 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.238975 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.239002 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.239024 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.340976 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.341037 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.341060 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.341082 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.341100 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.443926 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.443968 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.443977 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.443991 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.444000 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.503736 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.503815 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:12 crc kubenswrapper[4712]: E0131 05:40:12.503923 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:12 crc kubenswrapper[4712]: E0131 05:40:12.504033 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.503935 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.504084 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:12 crc kubenswrapper[4712]: E0131 05:40:12.504470 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:12 crc kubenswrapper[4712]: E0131 05:40:12.505325 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.511059 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 21:37:03.682329545 +0000 UTC Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.546281 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.546339 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.546349 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.546363 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.546374 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.648398 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.648477 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.648486 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.648500 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.648509 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.750268 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.750315 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.750327 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.750343 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.750354 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.852467 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.852561 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.852573 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.852597 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.852614 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.954924 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.954964 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.954975 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.954990 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:12 crc kubenswrapper[4712]: I0131 05:40:12.954999 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:12Z","lastTransitionTime":"2026-01-31T05:40:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.057666 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.057715 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.057728 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.057758 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.057772 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.160273 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.160345 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.160356 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.160374 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.160382 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.262785 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.262815 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.262822 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.262834 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.262842 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.364587 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.364622 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.364631 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.364644 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.364654 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.466256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.466293 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.466304 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.466321 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.466331 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.511780 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 13:16:51.050925966 +0000 UTC Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.568612 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.568649 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.568657 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.568670 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.568678 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.671358 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.671436 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.671462 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.671493 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.671526 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.774341 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.774407 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.774428 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.774455 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.774475 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.876592 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.876635 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.876645 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.876662 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.876674 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.978806 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.978847 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.978856 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.978872 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:13 crc kubenswrapper[4712]: I0131 05:40:13.978883 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:13Z","lastTransitionTime":"2026-01-31T05:40:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.081273 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.081319 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.081330 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.081348 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.081358 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:14Z","lastTransitionTime":"2026-01-31T05:40:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.183359 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.183401 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.183411 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.183427 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.183438 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:14Z","lastTransitionTime":"2026-01-31T05:40:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.286005 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.286046 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.286056 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.286071 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.286080 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:14Z","lastTransitionTime":"2026-01-31T05:40:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.388233 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.388304 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.388325 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.388349 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.388370 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:14Z","lastTransitionTime":"2026-01-31T05:40:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.490481 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.490556 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.490575 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.490606 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.490628 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:14Z","lastTransitionTime":"2026-01-31T05:40:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.503924 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.504024 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.503924 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.504085 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:14 crc kubenswrapper[4712]: E0131 05:40:14.504315 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:14 crc kubenswrapper[4712]: E0131 05:40:14.504524 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:14 crc kubenswrapper[4712]: E0131 05:40:14.504661 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:14 crc kubenswrapper[4712]: E0131 05:40:14.504814 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.512108 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 05:46:04.461439208 +0000 UTC Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.562387 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8
bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:55Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:55.632574 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:55.632580 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:55.632599 6453 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:55.632607 6453 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:55.632608 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:55.632631 6453 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:55.632695 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:55.632715 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:55.632723 6453 factory.go:656] Stopping watch factory\\\\nI0131 05:39:55.632735 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0131 05:39:55.632744 6453 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:55.632751 6453 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:55.632831 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:39:55.632900 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:39:55.633068 6453 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.590159 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.593525 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.593560 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.593570 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.593585 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.593594 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:14Z","lastTransitionTime":"2026-01-31T05:40:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.605394 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.626866 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.639477 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.651733 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\
\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.664315 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.679484 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.692678 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.696436 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.696637 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.696737 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.696837 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.696960 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:14Z","lastTransitionTime":"2026-01-31T05:40:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.707529 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.718506 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.731987 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.750265 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.762241 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.775023 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:07Z\\\",\\\"message\\\":\\\"2026-01-31T05:39:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431\\\\n2026-01-31T05:39:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431 to /host/opt/cni/bin/\\\\n2026-01-31T05:39:22Z [verbose] multus-daemon started\\\\n2026-01-31T05:39:22Z [verbose] Readiness Indicator file check\\\\n2026-01-31T05:40:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.784926 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.797090 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:14Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.799516 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.799562 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.799571 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.799589 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.799599 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:14Z","lastTransitionTime":"2026-01-31T05:40:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.901798 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.901837 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.901846 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.901859 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:14 crc kubenswrapper[4712]: I0131 05:40:14.901868 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:14Z","lastTransitionTime":"2026-01-31T05:40:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.004792 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.004830 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.004838 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.004854 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.004864 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.107416 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.107477 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.107489 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.107509 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.107520 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.212584 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.212626 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.212638 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.212720 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.212732 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.314391 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.314424 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.314434 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.314448 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.314459 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.416218 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.416250 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.416258 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.416271 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.416279 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.512240 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 14:42:06.105267926 +0000 UTC Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.517790 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.517828 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.517840 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.517857 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.517876 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.619935 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.619992 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.620001 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.620014 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.620023 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.722730 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.722777 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.722789 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.722805 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.722818 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.825445 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.825498 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.825507 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.825523 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.825533 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.927639 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.927682 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.927692 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.927706 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:15 crc kubenswrapper[4712]: I0131 05:40:15.927716 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:15Z","lastTransitionTime":"2026-01-31T05:40:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.030043 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.030082 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.030093 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.030109 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.030120 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.132650 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.132711 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.132729 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.132757 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.132775 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.235037 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.235110 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.235133 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.235157 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.235219 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.337805 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.337887 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.337911 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.337940 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.337958 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.440553 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.440591 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.440608 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.440624 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.440634 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.503430 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.503479 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.503478 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.503436 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:16 crc kubenswrapper[4712]: E0131 05:40:16.503627 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:16 crc kubenswrapper[4712]: E0131 05:40:16.503713 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:16 crc kubenswrapper[4712]: E0131 05:40:16.503792 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:16 crc kubenswrapper[4712]: E0131 05:40:16.503863 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.512940 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 17:11:42.712033012 +0000 UTC Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.542930 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.542964 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.542975 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.542991 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.543002 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.644727 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.644808 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.644820 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.644834 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.644847 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.747442 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.747495 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.747511 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.747535 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.747554 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.849921 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.849968 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.849980 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.849996 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.850005 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.952721 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.952757 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.952766 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.952779 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:16 crc kubenswrapper[4712]: I0131 05:40:16.952790 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:16Z","lastTransitionTime":"2026-01-31T05:40:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.055676 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.055753 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.055764 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.055784 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.055797 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.158218 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.158263 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.158275 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.158292 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.158305 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.261822 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.261857 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.261866 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.261881 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.261890 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.364083 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.364454 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.364624 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.364786 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.364917 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.467503 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.467545 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.467558 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.467576 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.467590 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.513642 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 10:12:59.838032542 +0000 UTC Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.570139 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.570183 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.570192 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.570204 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.570212 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.672070 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.672104 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.672113 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.672126 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.672134 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.774061 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.774096 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.774107 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.774124 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.774135 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.876676 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.876730 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.876745 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.876764 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.876775 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.995032 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.995082 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.995098 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.995120 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:17 crc kubenswrapper[4712]: I0131 05:40:17.995136 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:17Z","lastTransitionTime":"2026-01-31T05:40:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.097842 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.097905 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.097915 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.097929 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.097938 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.199788 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.199837 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.199845 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.199859 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.199869 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.301226 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.301277 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.301290 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.301307 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.301318 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.403808 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.403889 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.403899 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.403915 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.403925 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.503372 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.503396 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.503466 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.503540 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.503751 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.503799 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.503865 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.504030 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.505719 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.505746 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.505755 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.505767 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.505776 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.514814 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 02:08:43.098100026 +0000 UTC Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.607689 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.607730 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.607739 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.607754 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.607763 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.694560 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.694606 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.694622 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.694640 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.694653 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.707882 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.712104 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.712147 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.712161 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.712191 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.712207 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.723384 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.728122 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.728191 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.728204 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.728224 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.728236 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.740507 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.743706 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.743851 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
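The repeated x509 failures above all point at the serving certificate of the "node.network-node-identity.openshift.io" webhook at https://127.0.0.1:9743, whose validity ended 2025-08-24T17:21:41Z, long before the logged clock time of 2026-01-31. A minimal Go sketch for confirming the certificate window from the node follows; it assumes the webhook is still listening on the address taken from the log, and the program itself (name, output format) is illustrative rather than any part of the kubelet or OpenShift tooling.

// certcheck.go: minimal sketch that reads the webhook's serving certificate
// and reports its validity window. InsecureSkipVerify is deliberate here:
// the goal is to inspect an expired certificate, not to authenticate the peer.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address taken from the kubelet error: Post "https://127.0.0.1:9743/node?timeout=10s"
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspection only; never use this to trust the peer
	})
	if err != nil {
		log.Fatalf("dial webhook: %v", err)
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		log.Fatal("no peer certificate presented")
	}
	cert := state.PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.UTC().Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		fmt.Println("certificate is EXPIRED; matches the kubelet x509 error")
	}
}

Against this log one would expect notAfter to print 2025-08-24T17:21:41Z, matching the "current time ... is after ..." clause in the webhook error.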
event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.743934 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.744009 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.744075 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.754797 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.763838 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.763882 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
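Separately from the webhook failure, every Ready=False condition in this log carries the same KubeletNotReady reason: no CNI configuration file in /etc/kubernetes/cni/net.d/. A small Go sketch of the corresponding on-node check follows; the extension list (.conf, .conflist, .json) matches what CNI config loaders conventionally accept, but this is a simplified approximation of the container runtime's own discovery logic, not a reproduction of it.

// cnicheck.go: list candidate CNI network configs in the directory the
// kubelet names in its NetworkPluginNotReady message. An empty result is
// consistent with the node staying NotReady until the network operator
// (re)writes its configuration.
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path copied from the kubelet log
	entries, err := os.ReadDir(dir)
	if err != nil {
		log.Fatalf("read %s: %v", dir, err)
	}
	found := 0
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions CNI loaders typically accept
			fmt.Println(filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration files found; matches the kubelet condition")
	}
}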
event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.763892 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.763908 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.763920 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.775996 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:18Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:18 crc kubenswrapper[4712]: E0131 05:40:18.776219 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.778042 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
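The "Unable to update node status" record above closes a burst of five rejected patch attempts: the four "will retry" entries at 05:40:18.723384, .740507, .754797, and .775996, plus, presumably, the one whose payload tail opens this excerpt. That is consistent with the kubelet retrying node-status updates a small fixed number of times (presumably its nodeStatusUpdateRetry constant) before giving up. A throwaway Go sketch for tallying such bursts from a saved log follows; the file name is hypothetical and the match strings are copied from the records above.

// retrycount.go: tally "Error updating node status, will retry" records
// between successive "update node status exceeds retry count" records in a
// saved kubelet log, to confirm the per-burst retry count.
package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"strings"
)

func main() {
	f, err := os.Open("kubelet.log") // hypothetical path to the uncompressed log
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	// Status-patch records run to tens of kilobytes; raise the token limit
	// comfortably above bufio's 64 KiB default so long records are not dropped.
	sc.Buffer(make([]byte, 0, 64*1024), 16*1024*1024)
	retries := 0
	for sc.Scan() {
		line := sc.Text()
		if strings.Contains(line, `"Error updating node status, will retry"`) {
			retries++
		}
		if strings.Contains(line, "update node status exceeds retry count") {
			fmt.Printf("burst of %d retries before giving up\n", retries)
			retries = 0
		}
	}
	if err := sc.Err(); err != nil {
		log.Fatal(err)
	}
}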
event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.778077 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.778090 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.778111 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.778122 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.881228 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.881321 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.881344 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.881375 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.881396 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.985390 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.985863 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.986021 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.986164 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:18 crc kubenswrapper[4712]: I0131 05:40:18.986409 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:18Z","lastTransitionTime":"2026-01-31T05:40:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.089340 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.089825 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.089955 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.090100 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.090246 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:19Z","lastTransitionTime":"2026-01-31T05:40:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.192927 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.192968 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.192978 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.192992 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.193004 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:19Z","lastTransitionTime":"2026-01-31T05:40:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.296243 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.296626 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.296780 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.296946 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.297115 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:19Z","lastTransitionTime":"2026-01-31T05:40:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.398791 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.398821 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.398849 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.398863 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.398872 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:19Z","lastTransitionTime":"2026-01-31T05:40:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.501838 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.501888 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.501904 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.501922 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.501933 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:19Z","lastTransitionTime":"2026-01-31T05:40:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
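Every repetition of the condition above carries the same diagnosis: the runtime reports NetworkReady=false because nothing in /etc/kubernetes/cni/net.d/ parses as a CNI network configuration. A rough standalone approximation of that directory check, where the accepted extensions are an assumption based on common CNI config loaders rather than cri-o's exact code:

    // cni_check.go: approximates the probe behind "no CNI configuration file
    // in /etc/kubernetes/cni/net.d/".
    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	dir := "/etc/kubernetes/cni/net.d"
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		fmt.Println("network not ready:", err)
    		return
    	}
    	for _, e := range entries {
    		// Common loaders accept these extensions for network configs.
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json":
    			fmt.Println("found CNI config:", e.Name())
    			return
    		}
    	}
    	fmt.Println("network not ready: no CNI configuration file in", dir)
    }

On this node the directory is empty until the network operator's pods come up, which is exactly the deadlock the following entries describe: those pods cannot start sandboxes until the network is ready.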
Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.515331 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 10:26:13.40599232 +0000 UTC
Jan 31 05:40:19 crc kubenswrapper[4712]: I0131 05:40:19.522044 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
[the five-message node-status group repeats at 05:40:19.605, 05:40:19.708, 05:40:19.811, 05:40:19.916, 05:40:20.021, 05:40:20.124, 05:40:20.227 and 05:40:20.330]
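The rotation deadline reported here (2025-11-28) differs on every subsequent pass (2025-11-12, 2025-12-27, 2025-12-09 below) even though the certificate expiration never changes. That is expected behavior: client-go's certificate manager recomputes a jittered deadline, a random point at roughly 70-90% of the certificate's lifetime, each time it evaluates rotation; and since the node clock has jumped past every candidate deadline, each pass presumably schedules an immediate rotation attempt. A sketch of that computation, with a one-year lifetime assumed purely for illustration:

    // rotation_jitter.go: why the "rotation deadline" differs on every attempt.
    // This mirrors the shape of client-go's jittered deadline, not the vendored code.
    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
    	total := notAfter.Sub(notBefore)
    	// A fresh random point in roughly the 70-90% span of the lifetime.
    	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
    	return notBefore.Add(jittered)
    }

    func main() {
    	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z") // expiration from the log
    	notBefore := notAfter.AddDate(0, 0, -365)                       // assumed one-year lifetime
    	for i := 0; i < 3; i++ {
    		fmt.Println("deadline:", nextRotationDeadline(notBefore, notAfter))
    	}
    }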
Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.392129 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.392413 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.392366881 +0000 UTC m=+150.486248772 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
[the five-message node-status group repeats at 05:40:20.433]
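The durationBeforeRetry of 1m4s is the volume manager's exponential backoff at work: the teardown keeps failing because the kubevirt.io.hostpath-provisioner CSI plugin has not re-registered since the restart, and each repeated failure doubles the delay. 64 seconds is what you reach after eight consecutive failures starting from the upstream defaults; the constants below are assumed from pkg/util/goroutinemap/exponentialbackoff, not read from this cluster:

    // backoff_sketch.go: the shape of the per-operation retry delay.
    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	const (
    		initial  = 500 * time.Millisecond // assumed initial delay
    		maxDelay = 2*time.Minute + 2*time.Second // assumed cap
    	)
    	d := initial
    	for failure := 1; failure <= 10; failure++ {
    		fmt.Printf("failure %2d -> durationBeforeRetry %v\n", failure, d)
    		d *= 2
    		if d > maxDelay {
    			d = maxDelay
    		}
    	}
    	// failure 8 prints "1m4s", matching the log entry above.
    }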
Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.493816 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.493857 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.493890 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.493920 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494004 4712 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494035 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494081 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494097 4712 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494104 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.494078601 +0000 UTC m=+150.587960482 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494158 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.494138582 +0000 UTC m=+150.588020513 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494010 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494225 4712 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494274 4712 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494310 4712 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494367 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.494329377 +0000 UTC m=+150.588211288 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.494402 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.494385738 +0000 UTC m=+150.588267699 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.504035 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.504051 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.504110 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.504235 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.504339 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.504508 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.504641 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:40:20 crc kubenswrapper[4712]: E0131 05:40:20.504785 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
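The kube-api-access-* volumes failing above are the projected service-account volumes the API server injects into every pod: a bound token bundled with the kube-root-ca.crt configmap, plus openshift-service-ca.crt on OpenShift. That is why a configmap that is "not registered" in the kubelet's object cache blocks the mount. A sketch of that volume's shape using client-go types; field values are illustrative, and the real volume also carries a downward-API namespace file, omitted here for brevity:

    // kube_api_access.go: the shape of a projected service-account volume.
    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    )

    func main() {
    	expiry := int64(3607) // illustrative bound-token lifetime in seconds
    	vol := corev1.Volume{
    		Name: "kube-api-access-s2dwl",
    		VolumeSource: corev1.VolumeSource{
    			Projected: &corev1.ProjectedVolumeSource{
    				Sources: []corev1.VolumeProjection{
    					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
    						ExpirationSeconds: &expiry,
    						Path:              "token",
    					}},
    					{ConfigMap: &corev1.ConfigMapProjection{
    						LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
    						Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}},
    					}},
    					{ConfigMap: &corev1.ConfigMapProjection{
    						LocalObjectReference: corev1.LocalObjectReference{Name: "openshift-service-ca.crt"},
    						Items:                []corev1.KeyToPath{{Key: "service-ca.crt", Path: "service-ca.crt"}},
    					}},
    				},
    			},
    		},
    	}
    	fmt.Printf("%+v\n", vol)
    }

Because every source in the projection must resolve before the volume can be set up, a single missing configmap fails the whole mount, as the nestedpendingoperations errors above show.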
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.516203 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 04:44:23.575493444 +0000 UTC Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.536467 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.536502 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.536511 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.536524 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.536536 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:20Z","lastTransitionTime":"2026-01-31T05:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.639676 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.639715 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.639725 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.639744 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:20 crc kubenswrapper[4712]: I0131 05:40:20.639758 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:20Z","lastTransitionTime":"2026-01-31T05:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[the five-message node-status group repeats at 05:40:20.742, 05:40:20.844, 05:40:20.947, 05:40:21.049, 05:40:21.152, 05:40:21.257, 05:40:21.360 and 05:40:21.463]
Jan 31 05:40:21 crc kubenswrapper[4712]: I0131 05:40:21.517095 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 01:13:25.454227494 +0000 UTC
[the five-message node-status group repeats at 05:40:21.567, 05:40:21.669, 05:40:21.772, 05:40:21.875, 05:40:21.977, 05:40:22.081, 05:40:22.183 and 05:40:22.286]
[the five-message node-status group repeats at 05:40:22.389 and 05:40:22.493]
Jan 31 05:40:22 crc kubenswrapper[4712]: I0131 05:40:22.503869 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:40:22 crc kubenswrapper[4712]: I0131 05:40:22.503921 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:40:22 crc kubenswrapper[4712]: I0131 05:40:22.503939 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:40:22 crc kubenswrapper[4712]: E0131 05:40:22.504028 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:40:22 crc kubenswrapper[4712]: E0131 05:40:22.504157 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:40:22 crc kubenswrapper[4712]: E0131 05:40:22.504293 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:40:22 crc kubenswrapper[4712]: I0131 05:40:22.504647 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:40:22 crc kubenswrapper[4712]: E0131 05:40:22.504791 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:40:22 crc kubenswrapper[4712]: I0131 05:40:22.517432 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 06:38:14.535945075 +0000 UTC
[the five-message node-status group repeats at 05:40:22.597]
[the five-message node-status group repeats at 05:40:22.704, 05:40:22.807, 05:40:22.911, 05:40:23.016, 05:40:23.119, 05:40:23.222 and 05:40:23.324]
Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.426765 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.426804 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.426813 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.427295 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.427332 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:23Z","lastTransitionTime":"2026-01-31T05:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.504544 4712 scope.go:117] "RemoveContainer" containerID="14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.518219 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 01:28:00.436504379 +0000 UTC Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.529646 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.529690 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.529700 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.529715 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.529724 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:23Z","lastTransitionTime":"2026-01-31T05:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.631900 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.632225 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.632257 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.632287 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.632309 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:23Z","lastTransitionTime":"2026-01-31T05:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.734623 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.734660 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.734671 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.734686 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.734697 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:23Z","lastTransitionTime":"2026-01-31T05:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.838420 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.838468 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.838480 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.838504 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.838522 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:23Z","lastTransitionTime":"2026-01-31T05:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.941731 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.941761 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.941769 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.941782 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.941790 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:23Z","lastTransitionTime":"2026-01-31T05:40:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.947160 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/2.log" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.948955 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"} Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.949658 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.965295 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\
\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:23 crc kubenswrapper[4712]: I0131 05:40:23.986040 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:23Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.005489 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.022623 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.044306 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.044353 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.044363 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.044377 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.044388 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.054049 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.073016 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.093953 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.108379 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:07Z\\\",\\\"message\\\":\\\"2026-01-31T05:39:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431\\\\n2026-01-31T05:39:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431 to /host/opt/cni/bin/\\\\n2026-01-31T05:39:22Z [verbose] multus-daemon started\\\\n2026-01-31T05:39:22Z [verbose] Readiness Indicator file check\\\\n2026-01-31T05:40:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.122575 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.137074 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.146341 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.146398 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.146412 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.146431 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.146444 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.151943 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.164600 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.178926 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.190622 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.207407 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:55Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:55.632574 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:55.632580 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:55.632599 6453 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:55.632607 6453 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:55.632608 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:55.632631 6453 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:55.632695 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:55.632715 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:55.632723 6453 factory.go:656] Stopping watch factory\\\\nI0131 05:39:55.632735 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0131 05:39:55.632744 6453 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:55.632751 6453 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:55.632831 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:39:55.632900 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:39:55.633068 6453 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.216823 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"845e5458-df9c-450a-bf59-c256213f6b68\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f680f1581bc66aada73195ab573b6eb0949528765387f0fc719365b4323d08cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.230310 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.242924 4712 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.249191 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.249257 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.249269 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.249295 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.249310 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.351773 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.351877 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.351890 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.351909 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.351921 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.454391 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.454439 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.454449 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.454466 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.454476 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.503940 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.504088 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:24 crc kubenswrapper[4712]: E0131 05:40:24.504123 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.504237 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.504279 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:24 crc kubenswrapper[4712]: E0131 05:40:24.504392 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:24 crc kubenswrapper[4712]: E0131 05:40:24.504434 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:24 crc kubenswrapper[4712]: E0131 05:40:24.504486 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.517552 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"845e5458-df9c-450a-bf59-c256213f6b68\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f680f1581bc66aada73195ab573b6eb0949528765387f0fc719365b4323d08cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 
2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.518597 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 03:53:24.147710883 +0000 UTC Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.534687 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.550133 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.556757 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.556807 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.556819 4712 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.556855 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.556870 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.563563 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.574645 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.587045 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.606882 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.626599 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:07Z\\\",\\\"message\\\":\\\"2026-01-31T05:39:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431\\\\n2026-01-31T05:39:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431 to /host/opt/cni/bin/\\\\n2026-01-31T05:39:22Z [verbose] multus-daemon started\\\\n2026-01-31T05:39:22Z [verbose] Readiness Indicator file check\\\\n2026-01-31T05:40:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.640425 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.658238 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.661440 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.661508 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.661518 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.661535 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.661547 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.672749 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.691417 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.706615 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.721874 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.741539 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.758706 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.763763 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.763803 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.763813 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.763830 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 
05:40:24.763840 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.778361 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ov
nkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:55Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:55.632574 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:55.632580 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:55.632599 6453 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:55.632607 6453 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:55.632608 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:55.632631 6453 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:55.632695 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:55.632715 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:55.632723 6453 factory.go:656] Stopping watch factory\\\\nI0131 05:39:55.632735 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0131 05:39:55.632744 6453 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:55.632751 6453 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:55.632831 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:39:55.632900 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 
05:39:55.633068 6453 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\
",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.792937 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.867018 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.867062 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.867075 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.867094 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.867110 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.954795 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/3.log" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.955993 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/2.log" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.960673 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8" exitCode=1 Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.960781 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.960860 4712 scope.go:117] "RemoveContainer" containerID="14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.966443 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8" Jan 31 05:40:24 crc kubenswrapper[4712]: E0131 05:40:24.966843 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.968776 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.968797 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.968806 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.968820 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.968830 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:24Z","lastTransitionTime":"2026-01-31T05:40:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.975935 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"845e5458-df9c-450a-bf59-c256213f6b68\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f680f1581bc66aada73195ab573b6eb0949528765387f0fc719365b4323d08cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:24 crc kubenswrapper[4712]: I0131 05:40:24.991907 4712 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-31T05:40:24Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.004720 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.021932 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.040691 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.054357 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.067694 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.072619 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.072754 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.072799 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.072854 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.072865 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:25Z","lastTransitionTime":"2026-01-31T05:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.084900 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.100560 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.115298 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.132332 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:07Z\\\",\\\"message\\\":\\\"2026-01-31T05:39:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431\\\\n2026-01-31T05:39:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431 to /host/opt/cni/bin/\\\\n2026-01-31T05:39:22Z [verbose] multus-daemon started\\\\n2026-01-31T05:39:22Z [verbose] Readiness Indicator file check\\\\n2026-01-31T05:40:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.150224 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.175349 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.177724 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.177791 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.177809 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.177832 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.177846 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:25Z","lastTransitionTime":"2026-01-31T05:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.189099 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.204510 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.221340 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.235095 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.258394 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14a9b13ba62e1cc434e0fb0785e7bc8c83040bd8bbc1b2caf85d8f2d9d7b8f9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:39:55Z\\\",\\\"message\\\":\\\".go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0131 05:39:55.632574 6453 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:39:55.632580 6453 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0131 05:39:55.632599 6453 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:39:55.632607 6453 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0131 05:39:55.632608 6453 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0131 05:39:55.632631 6453 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0131 05:39:55.632695 6453 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0131 05:39:55.632715 6453 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:39:55.632723 6453 factory.go:656] Stopping watch factory\\\\nI0131 05:39:55.632735 6453 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0131 05:39:55.632744 6453 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:39:55.632751 6453 handler.go:208] Removed *v1.Node event handler 7\\\\nI0131 05:39:55.632831 6453 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:39:55.632900 6453 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:39:55.633068 6453 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:24Z\\\",\\\"message\\\":\\\"nNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0131 05:40:24.542953 6890 reflector.go:311] Stopping 
reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0131 05:40:24.543477 6890 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0131 05:40:24.544385 6890 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:40:24.544461 6890 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:40:24.544502 6890 factory.go:656] Stopping watch factory\\\\nI0131 05:40:24.544544 6890 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:40:24.544565 6890 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:40:24.585329 6890 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0131 05:40:24.585474 6890 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0131 05:40:24.585601 6890 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:40:24.585719 6890 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:40:24.585884 6890 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\
\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:25Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.281349 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.281400 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.281411 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.281428 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.281444 4712 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:25Z","lastTransitionTime":"2026-01-31T05:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.384616 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.384659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.384669 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.384685 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.384695 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:25Z","lastTransitionTime":"2026-01-31T05:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.487652 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.487709 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.487723 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.487744 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.487756 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:25Z","lastTransitionTime":"2026-01-31T05:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.520453 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 11:54:50.837552826 +0000 UTC Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.589505 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.589548 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.589559 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.589576 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.589589 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:25Z","lastTransitionTime":"2026-01-31T05:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.692637 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.692681 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.692692 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.692709 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.692719 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:25Z","lastTransitionTime":"2026-01-31T05:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.795805 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.795844 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.795853 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.795866 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.795876 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:25Z","lastTransitionTime":"2026-01-31T05:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.899248 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.899317 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.899338 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.899371 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.899397 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:25Z","lastTransitionTime":"2026-01-31T05:40:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.975732 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/3.log" Jan 31 05:40:25 crc kubenswrapper[4712]: I0131 05:40:25.982218 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8" Jan 31 05:40:25 crc kubenswrapper[4712]: E0131 05:40:25.982624 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.004447 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.004881 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.004919 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.004937 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.004960 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.004980 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.018905 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.029746 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"845e5458-df9c-450a-bf59-c256213f6b68\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f680f1581bc66aada73195ab573b6eb0949528765387f0fc719365b4323d08cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.046865 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.066492 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.083652 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.102311 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.108064 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.108104 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.108124 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.108152 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.108197 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.120711 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.137818 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.158213 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.173768 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.191591 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:07Z\\\",\\\"message\\\":\\\"2026-01-31T05:39:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431\\\\n2026-01-31T05:39:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431 to /host/opt/cni/bin/\\\\n2026-01-31T05:39:22Z [verbose] multus-daemon started\\\\n2026-01-31T05:39:22Z [verbose] Readiness Indicator file check\\\\n2026-01-31T05:40:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.206351 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.211030 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.211087 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.211105 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.211124 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.211139 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.226283 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.242345 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.266093 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6601eb847050b536d027760c1c2a60a7dfcd52
e169b1b0906639e9a9aaabe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:24Z\\\",\\\"message\\\":\\\"nNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0131 05:40:24.542953 6890 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0131 05:40:24.543477 6890 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0131 05:40:24.544385 6890 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:40:24.544461 6890 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:40:24.544502 6890 factory.go:656] Stopping watch factory\\\\nI0131 05:40:24.544544 6890 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:40:24.544565 6890 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:40:24.585329 6890 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0131 05:40:24.585474 6890 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0131 05:40:24.585601 6890 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:40:24.585719 6890 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:40:24.585884 6890 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:40:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.279738 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.298224 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:26Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.315531 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.315718 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.315775 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.315836 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.315938 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.419277 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.419568 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.419638 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.419701 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.419767 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.503618 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.503614 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.503712 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:40:26 crc kubenswrapper[4712]: E0131 05:40:26.504372 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:40:26 crc kubenswrapper[4712]: E0131 05:40:26.504056 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.503896 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:40:26 crc kubenswrapper[4712]: E0131 05:40:26.504563 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:40:26 crc kubenswrapper[4712]: E0131 05:40:26.504624 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.520781 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 14:05:20.567776864 +0000 UTC
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.522699 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.522729 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.522744 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.522761 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.522774 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.625789 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.626126 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.626290 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.626439 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.626555 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.728642 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.728682 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.728691 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.728704 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.728713 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.835114 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.835165 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.835207 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.835227 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.835242 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.938609 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.938647 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.938659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.938675 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:26 crc kubenswrapper[4712]: I0131 05:40:26.938686 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:26Z","lastTransitionTime":"2026-01-31T05:40:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.042977 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.043019 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.043030 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.043048 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.043059 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.145097 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.145147 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.145158 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.145208 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.145221 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.247486 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.247531 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.247542 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.247559 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.247569 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.351092 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.351139 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.351151 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.351167 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.351193 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.454328 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.454371 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.454384 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.454400 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.454411 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.521621 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 22:08:08.227672899 +0000 UTC
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.557280 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.557591 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.557664 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.557750 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.557826 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.660938 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.661047 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.661070 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.661114 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.661137 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.764702 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.765022 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.765096 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.765166 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.765260 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.867675 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.867741 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.867768 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.867800 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.867821 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.971771 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.971813 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.971821 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.971844 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:27 crc kubenswrapper[4712]: I0131 05:40:27.971854 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:27Z","lastTransitionTime":"2026-01-31T05:40:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.074163 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.074480 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.074693 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.074763 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.074829 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:28Z","lastTransitionTime":"2026-01-31T05:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.178122 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.178197 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.178209 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.178316 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.178331 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:28Z","lastTransitionTime":"2026-01-31T05:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.281910 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.281955 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.281965 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.281981 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.281991 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:28Z","lastTransitionTime":"2026-01-31T05:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.385830 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.385884 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.385913 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.385932 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.385946 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:28Z","lastTransitionTime":"2026-01-31T05:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.488385 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.488425 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.488433 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.488457 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.488468 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:28Z","lastTransitionTime":"2026-01-31T05:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.503744 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:40:28 crc kubenswrapper[4712]: E0131 05:40:28.503867 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.504071 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:28 crc kubenswrapper[4712]: E0131 05:40:28.504134 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.504414 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.504472 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:28 crc kubenswrapper[4712]: E0131 05:40:28.504570 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:28 crc kubenswrapper[4712]: E0131 05:40:28.504633 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.523240 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 18:20:04.609828062 +0000 UTC Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.590345 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.590386 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.590397 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.590413 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.590428 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:28Z","lastTransitionTime":"2026-01-31T05:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.692579 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.692624 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.692638 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.692655 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.692665 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:28Z","lastTransitionTime":"2026-01-31T05:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.794784 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.794838 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.794848 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.794865 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.794896 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:28Z","lastTransitionTime":"2026-01-31T05:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.897597 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.897637 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.897646 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.897662 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:28 crc kubenswrapper[4712]: I0131 05:40:28.897671 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:28Z","lastTransitionTime":"2026-01-31T05:40:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.000216 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.000257 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.000291 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.000308 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.000319 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.102768 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.102801 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.102809 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.102823 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.102833 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.114125 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.114157 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.114167 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.114216 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.114227 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 31 05:40:29 crc kubenswrapper[4712]: E0131 05:40:29.127332 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.131699 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.131742 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.131752 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.131776 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.131815 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: E0131 05:40:29.147327 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.150724 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.150883 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.150991 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.151110 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.151230 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: E0131 05:40:29.162929 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.167606 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.167642 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.167653 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.167670 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.167682 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: E0131 05:40:29.179108 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.184020 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.184076 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.184099 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.184129 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.184149 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: E0131 05:40:29.197977 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:29Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:29 crc kubenswrapper[4712]: E0131 05:40:29.198119 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.206063 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.206462 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.206565 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.206664 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.206744 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.309775 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.309823 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.309836 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.309856 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.309869 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.411658 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.411691 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.411702 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.411718 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.411729 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.514480 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.514517 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.514527 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.514540 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.514552 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.523665 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 07:00:10.874946757 +0000 UTC Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.616762 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.616791 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.616798 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.616812 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.616822 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.719870 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.719921 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.719935 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.719953 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.719965 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.823093 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.823146 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.823162 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.823204 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.823219 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.926918 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.926988 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.927005 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.927031 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:29 crc kubenswrapper[4712]: I0131 05:40:29.927047 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:29Z","lastTransitionTime":"2026-01-31T05:40:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.030799 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.030896 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.030927 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.030961 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.030981 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.134008 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.134083 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.134106 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.134142 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.134161 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.237455 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.237521 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.237539 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.237565 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.237584 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.340388 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.340444 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.340461 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.340514 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.340533 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.443843 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.443904 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.443922 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.443951 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.444029 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.505503 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:30 crc kubenswrapper[4712]: E0131 05:40:30.505676 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.505960 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:30 crc kubenswrapper[4712]: E0131 05:40:30.506050 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.506297 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:30 crc kubenswrapper[4712]: E0131 05:40:30.506384 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.506680 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:30 crc kubenswrapper[4712]: E0131 05:40:30.506787 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.541828 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 04:01:24.652783644 +0000 UTC Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.547168 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.547233 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.547245 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.547267 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.547283 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.649926 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.649975 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.649992 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.650017 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.650034 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.752619 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.752651 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.752660 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.752675 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.752684 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.855472 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.855613 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.855673 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.855704 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.855731 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.960274 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.960352 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.960372 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.960408 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:30 crc kubenswrapper[4712]: I0131 05:40:30.960429 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:30Z","lastTransitionTime":"2026-01-31T05:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.063421 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.063471 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.063486 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.063512 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.063528 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:31Z","lastTransitionTime":"2026-01-31T05:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.167724 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.167827 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.167853 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.167879 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.167894 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:31Z","lastTransitionTime":"2026-01-31T05:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.272577 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.272676 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.272694 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.272732 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.272759 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:31Z","lastTransitionTime":"2026-01-31T05:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.375483 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.375860 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.375970 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.376066 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.376164 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:31Z","lastTransitionTime":"2026-01-31T05:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.478896 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.479416 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.479494 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.479610 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.479672 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:31Z","lastTransitionTime":"2026-01-31T05:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.542854 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 13:31:13.573728288 +0000 UTC Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.583666 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.584225 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.584441 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.584661 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.584867 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:31Z","lastTransitionTime":"2026-01-31T05:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.688926 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.689014 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.689033 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.689067 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.689085 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:31Z","lastTransitionTime":"2026-01-31T05:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.793211 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.793569 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.793651 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.793720 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.793784 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:31Z","lastTransitionTime":"2026-01-31T05:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.897707 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.897770 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.897787 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.897809 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:31 crc kubenswrapper[4712]: I0131 05:40:31.897825 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:31Z","lastTransitionTime":"2026-01-31T05:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.002223 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.002292 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.002312 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.002343 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.002374 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.105960 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.106053 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.106076 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.106110 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.106138 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.210056 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.210114 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.210134 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.210214 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.210257 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.313961 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.314061 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.314089 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.314128 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.314146 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.417704 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.418283 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.418449 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.418662 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.418899 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.503773 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.503854 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:32 crc kubenswrapper[4712]: E0131 05:40:32.504049 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.504122 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.504386 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:32 crc kubenswrapper[4712]: E0131 05:40:32.504608 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:32 crc kubenswrapper[4712]: E0131 05:40:32.504679 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:32 crc kubenswrapper[4712]: E0131 05:40:32.504743 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.520939 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.521023 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.521047 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.521079 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.521099 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.543739 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 20:15:39.074842959 +0000 UTC Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.624329 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.624382 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.624393 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.624410 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.624422 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.728026 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.728079 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.728088 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.728108 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.728119 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.831840 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.831902 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.831926 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.831962 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.831986 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.935417 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.935477 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.935499 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.935528 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:32 crc kubenswrapper[4712]: I0131 05:40:32.935549 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:32Z","lastTransitionTime":"2026-01-31T05:40:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.038276 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.038387 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.038408 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.038438 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.038485 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.141978 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.142498 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.142602 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.142717 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.142801 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.246553 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.246623 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.246642 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.246670 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.246686 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.349430 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.349492 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.349502 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.349519 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.349529 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.452587 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.452689 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.452720 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.452761 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.452794 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.544117 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 08:31:13.332173802 +0000 UTC Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.555682 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.555752 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.555774 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.555803 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.555821 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.658915 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.658989 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.659007 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.659038 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.659060 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.762394 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.762480 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.762502 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.762536 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.762557 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.866768 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.867347 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.867548 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.867733 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.867901 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.972999 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.973087 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.973107 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.973142 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:33 crc kubenswrapper[4712]: I0131 05:40:33.973162 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:33Z","lastTransitionTime":"2026-01-31T05:40:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.075781 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.076351 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.076519 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.076686 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.076918 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:34Z","lastTransitionTime":"2026-01-31T05:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.180236 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.180315 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.180335 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.180367 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.180387 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:34Z","lastTransitionTime":"2026-01-31T05:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.283364 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.283836 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.284005 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.284147 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.284328 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:34Z","lastTransitionTime":"2026-01-31T05:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.386756 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.386815 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.386834 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.386859 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.386879 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:34Z","lastTransitionTime":"2026-01-31T05:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.489789 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.489866 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.489890 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.489923 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.489943 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:34Z","lastTransitionTime":"2026-01-31T05:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.504115 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.504243 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.504273 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:34 crc kubenswrapper[4712]: E0131 05:40:34.504405 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.504537 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:34 crc kubenswrapper[4712]: E0131 05:40:34.504847 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:34 crc kubenswrapper[4712]: E0131 05:40:34.505785 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:34 crc kubenswrapper[4712]: E0131 05:40:34.506370 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.523825 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"845e5458-df9c-450a-bf59-c256213f6b68\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f680f1581bc66aada73195ab573b6eb0949528765387f0fc719365b4323d08cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f259e1020526e3ebac13d5ec99950bd8e1aa184d58839b018184755c567b387\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.537574 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaac0246-673f-4670-8b7b-c27ecaf0d847\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7243df068e85bff0fb91e44e5269b4f7e26ce04806f5abdd4a7316310a5552e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lqspq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6hwmd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.544966 4712 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 07:54:40.320708413 +0000 UTC Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.551065 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-65vvn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b4a1f052-e167-4d29-ba2d-82b193736f59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba2d80cc81c685391b6e9dc20daf4c4f83a0c0c34eac03be2a03e717cacb216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dv65h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-65vvn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.567219 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://867d756a7b416d15bee21aca73d904dd392f150b023a0dad587b15c59373f537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.587667 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.593217 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.593292 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.593305 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.593323 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.593337 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:34Z","lastTransitionTime":"2026-01-31T05:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.607728 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.631222 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36a43db9-04c0-48fb-8ee1-6e77c26672b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae9ff7b8942b17e8a3db1cc9ed31ef6ede95d6bb468a25c2cd6f8f580f1b0320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a97f2e6ce66ba8fc6d642f87d7a8989ee96c92486967d861cb223fcff14838ae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6297fcd05149db27e4abfab4b252e32e84c97875b74b448d1b12108de4aa5f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275fa2806cf0a456acc9616da30a78c11dfe859f360cd122a5a0c672a456b694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7791a47bad9987e93e63d93cfe60db7943c79889d39d33b67990c3733f9f59c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d265383bd6d9ce872093a1f7824cd2f8516a8e4ab9e1235a5646fb9fdef770\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6a07425ce07b93e53e2ad6fe35556c6cea542ac08190d8953c51c97548a65e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwh7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sn2n4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.646227 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.664578 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.683297 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce9a9d819dbb5badb36a242735aa9018f4c15fca38ed8d965164fc6074696927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.696732 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.696832 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.696848 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.696894 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.696910 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:34Z","lastTransitionTime":"2026-01-31T05:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.705314 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://daa6fa239070ec3f689e3a1d4fd14e42fac98731ab8323efad4e0b2d98d3d45d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7958d9b48e960e1cbd7a3160dc2d08edc6dc3e7d03d415d72f819dac55377a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.732258 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.752695 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zg9rz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83251fc5-49c0-48ed-b6a1-debf4fb30255\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fdc306768797acd2850f2653f5d4ea8898dbc3cb69b179cd9bc68c6eaeff3de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z4bzd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:17Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zg9rz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.775740 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:07Z\\\",\\\"message\\\":\\\"2026-01-31T05:39:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431\\\\n2026-01-31T05:39:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431 to /host/opt/cni/bin/\\\\n2026-01-31T05:39:22Z [verbose] multus-daemon started\\\\n2026-01-31T05:39:22Z [verbose] Readiness Indicator file check\\\\n2026-01-31T05:40:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.792851 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a64404a-eff0-4714-83bd-78da3bb0616b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dec8c999b4e767795d99ba9d1b0a11fa39b18d759be57507a0b709aab32d5690\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c6b0e7c5e7c1b7249bce5096930b70197c5db764eb1982e8bc66d366473d92e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9208f41a64093010f267b8ef2c0c4ddaedb51f2c216f6fa285cf59066ce072aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.799582 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.799655 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.799671 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.799720 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.799735 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:34Z","lastTransitionTime":"2026-01-31T05:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.810903 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f896863-cc9b-4515-b88b-735b5e6e06fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0a6a61ab3c178bb93164029f8c92c230e14007da09257dec385007560cb0764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2086cafff72b06d64fbc70d9e430eac698c0569c79d05f42b8042e34758f51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccd5426d0cce8eefc9a5cea4b61ae16e87d1d94fe93b29a0155f7fa186cde0a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://451e2323b39387506ca95e11f0cb8d409ced26766e04745e9f10e480857a3709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.843034 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec6601eb847050b536d027760c1c2a60a7dfcd52
e169b1b0906639e9a9aaabe8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:24Z\\\",\\\"message\\\":\\\"nNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0131 05:40:24.542953 6890 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0131 05:40:24.543477 6890 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0131 05:40:24.544385 6890 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0131 05:40:24.544461 6890 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0131 05:40:24.544502 6890 factory.go:656] Stopping watch factory\\\\nI0131 05:40:24.544544 6890 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0131 05:40:24.544565 6890 handler.go:208] Removed *v1.Node event handler 2\\\\nI0131 05:40:24.585329 6890 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0131 05:40:24.585474 6890 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0131 05:40:24.585601 6890 ovnkube.go:599] Stopped ovnkube\\\\nI0131 05:40:24.585719 6890 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0131 05:40:24.585884 6890 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:40:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xdhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6r6bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.867086 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5svzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03cf41cd-8606-4e98-a290-023fbe7d0956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pn875\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:32Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5svzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:34Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.903490 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.903573 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.903595 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.903621 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:34 crc kubenswrapper[4712]: I0131 05:40:34.903638 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:34Z","lastTransitionTime":"2026-01-31T05:40:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.005901 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.005941 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.005951 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.005965 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.005975 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.109234 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.109317 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.109339 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.109370 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.109388 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.213042 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.213130 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.213150 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.213214 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.213239 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.315935 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.315998 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.316008 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.316022 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.316030 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.420018 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.420101 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.420123 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.420154 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.420206 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.523902 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.523995 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.524015 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.524048 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.524070 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.546245 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 17:00:05.008812072 +0000 UTC Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.628597 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.628651 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.628665 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.628693 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.628708 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.732852 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.732951 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.732979 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.733018 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.733049 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.836737 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.836811 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.836828 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.836856 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.836878 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.941054 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.941225 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.941268 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.941367 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:35 crc kubenswrapper[4712]: I0131 05:40:35.941437 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:35Z","lastTransitionTime":"2026-01-31T05:40:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.045973 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.046109 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.046243 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.046332 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.046474 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.152369 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.152510 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.152533 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.152595 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.152621 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.255896 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.255960 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.255979 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.256011 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.256036 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.359556 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.359687 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.359713 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.359752 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.359781 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.463512 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.463584 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.463602 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.463632 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.463651 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.490056 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:36 crc kubenswrapper[4712]: E0131 05:40:36.490438 4712 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:40:36 crc kubenswrapper[4712]: E0131 05:40:36.490623 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs podName:03cf41cd-8606-4e98-a290-023fbe7d0956 nodeName:}" failed. No retries permitted until 2026-01-31 05:41:40.49057566 +0000 UTC m=+166.584457661 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs") pod "network-metrics-daemon-5svzb" (UID: "03cf41cd-8606-4e98-a290-023fbe7d0956") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.503587 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.503681 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.503725 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:36 crc kubenswrapper[4712]: E0131 05:40:36.503775 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:36 crc kubenswrapper[4712]: E0131 05:40:36.503889 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.503928 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:36 crc kubenswrapper[4712]: E0131 05:40:36.504063 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:36 crc kubenswrapper[4712]: E0131 05:40:36.504259 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.547104 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 04:28:27.234637751 +0000 UTC Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.568814 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.568891 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.568913 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.568982 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.569005 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.671930 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.672015 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.672027 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.672049 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.672071 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.774362 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.774437 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.774456 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.774485 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.774506 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.877317 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.877370 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.877397 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.877414 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.877425 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.980412 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.980501 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.980523 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.980556 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:36 crc kubenswrapper[4712]: I0131 05:40:36.980576 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:36Z","lastTransitionTime":"2026-01-31T05:40:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.083632 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.083710 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.083723 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.083742 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.083755 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:37Z","lastTransitionTime":"2026-01-31T05:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.187518 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.187571 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.187584 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.187604 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.187619 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:37Z","lastTransitionTime":"2026-01-31T05:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.290937 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.291035 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.291052 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.291080 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.291096 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:37Z","lastTransitionTime":"2026-01-31T05:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.394501 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.394548 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.394557 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.394573 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.394584 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:37Z","lastTransitionTime":"2026-01-31T05:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.497480 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.497555 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.497576 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.497611 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.497633 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:37Z","lastTransitionTime":"2026-01-31T05:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.504548 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8" Jan 31 05:40:37 crc kubenswrapper[4712]: E0131 05:40:37.504885 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.548154 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 10:39:47.66979963 +0000 UTC Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.600790 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.600850 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.600863 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.600885 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.600899 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:37Z","lastTransitionTime":"2026-01-31T05:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.703197 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.703259 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.703272 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.703298 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.703314 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:37Z","lastTransitionTime":"2026-01-31T05:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.806771 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.806837 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.806851 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.806871 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.806884 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:37Z","lastTransitionTime":"2026-01-31T05:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.909279 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.909333 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.909344 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.909359 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:37 crc kubenswrapper[4712]: I0131 05:40:37.909372 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:37Z","lastTransitionTime":"2026-01-31T05:40:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.013329 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.013404 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.013421 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.013447 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.013464 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.116446 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.116525 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.116543 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.116575 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.116594 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.220136 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.220750 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.221033 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.221321 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.221578 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.325049 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.325605 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.325751 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.325897 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.326059 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.429853 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.429977 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.429996 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.430029 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.430051 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.503312 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.503320 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.503645 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:40:38 crc kubenswrapper[4712]: E0131 05:40:38.503969 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:40:38 crc kubenswrapper[4712]: E0131 05:40:38.504224 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.504280 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:40:38 crc kubenswrapper[4712]: E0131 05:40:38.504774 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:40:38 crc kubenswrapper[4712]: E0131 05:40:38.504872 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.526483 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.534119 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.534202 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.534221 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.534251 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.534274 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.549385 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 08:28:25.676815891 +0000 UTC
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.638428 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.638496 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.638515 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.638545 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.638566 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.742462 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.742513 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.742522 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.742540 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.742551 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.846292 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.846360 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.846379 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.846409 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.846432 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.949694 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.949766 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.949787 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.949816 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:38 crc kubenswrapper[4712]: I0131 05:40:38.949835 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:38Z","lastTransitionTime":"2026-01-31T05:40:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.052657 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.052708 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.052717 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.052735 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.052745 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.155550 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.155650 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.155673 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.155706 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.155728 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.261292 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.261365 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.261384 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.261418 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.261443 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.365481 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.365555 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.365574 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.365604 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.365623 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.430985 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.431046 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.431057 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.431138 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.431151 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 31 05:40:39 crc kubenswrapper[4712]: E0131 05:40:39.447167 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:39Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.453141 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.453260 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.453283 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.453313 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.453335 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:39 crc kubenswrapper[4712]: E0131 05:40:39.469597 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:39Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.474515 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.474593 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
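The patch attempt above and the retries that follow all fail the same way: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, long before the node clock's 2026-01-31 timestamps, so every node-status update is rejected before it reaches the API server. A minimal Go sketch of how one might confirm the certificate's validity window from the node (illustrative only; the endpoint address comes from the log, the probe itself is not kubelet code, and InsecureSkipVerify is used deliberately so the handshake still succeeds with an expired certificate):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Endpoint taken from the webhook error in the log above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect the certificate even though it is expired
	})
	if err != nil {
		log.Fatalf("TLS handshake failed: %v", err)
	}
	defer conn.Close()

	// The leaf certificate is the serving certificate the webhook presented.
	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now()
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	fmt.Printf("expired:   %v (checked at %s)\n", now.After(cert.NotAfter), now.Format(time.RFC3339))
}

Run against the node above, this would report notAfter 2025-08-24T17:21:41Z and expired=true, matching the x509 verification error in each retry.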
event="NodeHasNoDiskPressure" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.474614 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.474648 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.474672 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:39 crc kubenswrapper[4712]: E0131 05:40:39.494657 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:39Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.500612 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.500669 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.500682 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.500704 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.500718 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:39 crc kubenswrapper[4712]: E0131 05:40:39.520613 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:39Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.524965 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.525096 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.525114 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.525143 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.525203 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:39 crc kubenswrapper[4712]: E0131 05:40:39.539786 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8040d6f3-e51f-4e3b-aeae-54f83f904d83\\\",\\\"systemUUID\\\":\\\"c89c3fa9-398d-448e-b1ac-e77818b7bcd0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:39Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:39 crc kubenswrapper[4712]: E0131 05:40:39.539906 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.542480 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.542539 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.542554 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.542578 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.542594 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.549903 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 23:06:55.449720023 +0000 UTC Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.645062 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.645124 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.645138 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.645162 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.645204 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.749206 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.749281 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.749306 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.749331 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.749349 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.852296 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.852371 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.852397 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.852434 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.852459 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.956301 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.956373 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.956390 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.956420 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:39 crc kubenswrapper[4712]: I0131 05:40:39.956440 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:39Z","lastTransitionTime":"2026-01-31T05:40:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.059590 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.059651 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.059665 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.059691 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.059707 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.165078 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.165155 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.165193 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.165218 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.165235 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.268643 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.268685 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.268695 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.268711 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.268720 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.372286 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.372346 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.372362 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.372384 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.372400 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.475063 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.475095 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.475105 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.475119 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.475130 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.503711 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.503751 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:40 crc kubenswrapper[4712]: E0131 05:40:40.503852 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.503711 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:40 crc kubenswrapper[4712]: E0131 05:40:40.503987 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.503719 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:40 crc kubenswrapper[4712]: E0131 05:40:40.504158 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:40 crc kubenswrapper[4712]: E0131 05:40:40.504314 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.550768 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 22:43:08.121632959 +0000 UTC Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.578378 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.578478 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.578506 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.578552 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.578581 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.681690 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.681767 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.681785 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.681815 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.681833 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.785026 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.785110 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.785130 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.785162 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.785223 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.889319 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.889397 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.889434 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.889473 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.889498 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.993336 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.993416 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.993430 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.993455 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:40 crc kubenswrapper[4712]: I0131 05:40:40.993508 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:40Z","lastTransitionTime":"2026-01-31T05:40:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.097521 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.097585 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.097598 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.097621 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.097635 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:41Z","lastTransitionTime":"2026-01-31T05:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.200519 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.200585 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.200594 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.200616 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.200627 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:41Z","lastTransitionTime":"2026-01-31T05:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.303762 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.303842 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.303857 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.303883 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.303899 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:41Z","lastTransitionTime":"2026-01-31T05:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.407659 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.407716 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.407726 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.407745 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.407754 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:41Z","lastTransitionTime":"2026-01-31T05:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.509968 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.510021 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.510037 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.510061 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.510078 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:41Z","lastTransitionTime":"2026-01-31T05:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.550943 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 13:01:06.830796816 +0000 UTC Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.614058 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.614131 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.614144 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.614172 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.614203 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:41Z","lastTransitionTime":"2026-01-31T05:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.716966 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.717248 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.717292 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.717399 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.717493 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:41Z","lastTransitionTime":"2026-01-31T05:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.820677 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.820742 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.820753 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.820776 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.820790 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:41Z","lastTransitionTime":"2026-01-31T05:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.923563 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.923703 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.923724 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.923757 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:41 crc kubenswrapper[4712]: I0131 05:40:41.923777 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:41Z","lastTransitionTime":"2026-01-31T05:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.027475 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.027557 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.027577 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.027609 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.027628 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.131086 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.131166 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.131230 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.131269 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.131298 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.235068 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.235125 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.235136 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.235158 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.235174 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.338271 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.338353 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.338413 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.338455 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.338481 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.441223 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.441269 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.441280 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.441292 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.441301 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.503611 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.503665 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:42 crc kubenswrapper[4712]: E0131 05:40:42.504060 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.504148 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:42 crc kubenswrapper[4712]: E0131 05:40:42.504301 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:42 crc kubenswrapper[4712]: E0131 05:40:42.504400 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.504805 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:42 crc kubenswrapper[4712]: E0131 05:40:42.504950 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.546152 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.546243 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.546255 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.546269 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.546281 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.551512 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 10:17:37.683683927 +0000 UTC Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.649328 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.649409 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.649423 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.649444 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.649457 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.752592 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.752647 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.752665 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.752695 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.752762 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.856561 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.856726 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.856752 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.856788 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.856810 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.960678 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.960821 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.960900 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.960946 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:42 crc kubenswrapper[4712]: I0131 05:40:42.960975 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:42Z","lastTransitionTime":"2026-01-31T05:40:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.064162 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.064365 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.064396 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.064435 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.064463 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:43Z","lastTransitionTime":"2026-01-31T05:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.167924 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.167976 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.167984 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.168001 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.168012 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:43Z","lastTransitionTime":"2026-01-31T05:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.271767 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.271835 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.271852 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.271873 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.271887 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:43Z","lastTransitionTime":"2026-01-31T05:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.375291 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.375378 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.375403 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.375440 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.375481 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:43Z","lastTransitionTime":"2026-01-31T05:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.478806 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.478884 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.478902 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.478936 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.478957 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:43Z","lastTransitionTime":"2026-01-31T05:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.552283 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 08:38:52.655076004 +0000 UTC Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.583680 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.583741 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.583753 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.583771 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.583782 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:43Z","lastTransitionTime":"2026-01-31T05:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.688232 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.688344 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.688368 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.688407 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.688435 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:43Z","lastTransitionTime":"2026-01-31T05:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.792759 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.792853 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.792878 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.792911 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.792932 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:43Z","lastTransitionTime":"2026-01-31T05:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.898141 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.898224 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.898235 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.898257 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:43 crc kubenswrapper[4712]: I0131 05:40:43.898273 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:43Z","lastTransitionTime":"2026-01-31T05:40:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.001592 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.001671 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.001686 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.001707 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.001720 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.105751 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.105918 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.105950 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.105992 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.106015 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.210479 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.210559 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.210578 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.210609 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.210630 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.314981 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.315045 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.315061 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.315087 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.315242 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.419456 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.419527 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.419544 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.419567 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.419583 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.503152 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.503357 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.503544 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:44 crc kubenswrapper[4712]: E0131 05:40:44.503536 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.503574 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:44 crc kubenswrapper[4712]: E0131 05:40:44.503856 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:44 crc kubenswrapper[4712]: E0131 05:40:44.504323 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:44 crc kubenswrapper[4712]: E0131 05:40:44.504431 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.523137 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.523216 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.523237 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.523267 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.523287 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.526602 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zbfp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4943935-d884-4777-b679-bfabc7235a23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:40:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-31T05:40:07Z\\\",\\\"message\\\":\\\"2026-01-31T05:39:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431\\\\n2026-01-31T05:39:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_03fb0655-ca61-44bc-a33e-68e464c05431 to /host/opt/cni/bin/\\\\n2026-01-31T05:39:22Z [verbose] multus-daemon started\\\\n2026-01-31T05:39:22Z [verbose] Readiness Indicator file check\\\\n2026-01-31T05:40:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:39:18Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:40:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52btf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:18Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zbfp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.543504 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3711b16f-9595-405e-90ea-ecc5eda64737\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0f35b6623c9138f9bf392342475c5564bcaad15c15f17ce7c6c9dd3836077e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02c65337b5a0e7066b7f84fd934e84ba6aefb99b2220b2c8b0852f228c7bb2db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm844\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:39:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-76qlm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:44Z is after 2025-08-24T17:21:41Z" Jan 31 
05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.552473 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 20:17:45.807122001 +0000 UTC Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.562279 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac27bb4c-1d85-4ede-88eb-ced27b73d160\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-31T05:39:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0131 05:39:10.304432 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0131 05:39:10.305282 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3906538507/tls.crt::/tmp/serving-cert-3906538507/tls.key\\\\\\\"\\\\nI0131 05:39:16.389391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0131 05:39:16.395981 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0131 05:39:16.396011 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0131 05:39:16.396052 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0131 05:39:16.396058 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0131 05:39:16.412597 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0131 05:39:16.412640 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412658 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0131 05:39:16.412667 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0131 05:39:16.412673 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0131 05:39:16.412678 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0131 05:39:16.412684 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0131 05:39:16.413047 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0131 05:39:16.425653 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.592461 4712 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec6a14fe-555f-4dd9-9bfe-923b498a6f6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:39:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-31T05:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41f16067f8dcb0b1dd6d606dc36e1f5c9e3403d17ada82c3f0f7138e32463b08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://022c1b2249e1b55cfd29c0fc26b99409083fc1a3aef0f3341ed2ba8fe6d265b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4850c5ecbc43a437bd5bc09f5a26e2eac91fb154cb1d100aebb3a1cc1a0fe29d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b03aa24bcbd78ac9ccf8feb3d97e960ac8fddd8
66101edc25a7f1a58286e1f72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:39:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61fc9c71922060085ef4fa22067f9ac4ba3cbcc98dee0e71537dcc5eb7134735\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-31T05:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80b2e4d25779ae7c59a18cbabda676415676890bce80eb3aa959e3f4d1eba14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80b2e4d25779ae7c59a18cbabda676415676890bce80eb3aa959e3f4d1eba14e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e51516f5525b7456fd0251a69d2f934a667ccc69ce247c09ba499e646502afd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e51516f5525b7456fd0251a69d2f934a667ccc69ce247c09ba499e646502afd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bddfcecea38888121ec78264e2e5a43a71fb0eae81b0e16e54be33d80d997ec\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bddfcecea38888121ec78264e2e5a43a71fb0eae81b0e16e54be33d80d997ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-31T05:38:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-31T05:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-31T05:38:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-31T05:40:44Z is after 2025-08-24T17:21:41Z" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.626260 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.626388 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.626412 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.626446 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.626465 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.690005 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-zg9rz" podStartSLOduration=87.689942072 podStartE2EDuration="1m27.689942072s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:44.688806433 +0000 UTC m=+110.782688274" watchObservedRunningTime="2026-01-31 05:40:44.689942072 +0000 UTC m=+110.783823923" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.730327 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.730418 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.730433 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.730456 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.730473 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.733009 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=87.732992455 podStartE2EDuration="1m27.732992455s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:44.713039323 +0000 UTC m=+110.806921184" watchObservedRunningTime="2026-01-31 05:40:44.732992455 +0000 UTC m=+110.826874306" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.733420 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=56.733413826 podStartE2EDuration="56.733413826s" podCreationTimestamp="2026-01-31 05:39:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:44.733105858 +0000 UTC m=+110.826987699" watchObservedRunningTime="2026-01-31 05:40:44.733413826 +0000 UTC m=+110.827295677" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.795425 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=25.795396066 podStartE2EDuration="25.795396066s" podCreationTimestamp="2026-01-31 05:40:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:44.794877123 +0000 UTC m=+110.888758964" watchObservedRunningTime="2026-01-31 05:40:44.795396066 +0000 UTC 
m=+110.889277907" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.814718 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podStartSLOduration=87.814688092 podStartE2EDuration="1m27.814688092s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:44.814402875 +0000 UTC m=+110.908284726" watchObservedRunningTime="2026-01-31 05:40:44.814688092 +0000 UTC m=+110.908569933" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.832766 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.832848 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.832867 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.832896 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.832918 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.869943 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-65vvn" podStartSLOduration=87.869910871 podStartE2EDuration="1m27.869910871s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:44.831820393 +0000 UTC m=+110.925702234" watchObservedRunningTime="2026-01-31 05:40:44.869910871 +0000 UTC m=+110.963792712" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.935805 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.935872 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.935886 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.935931 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.935943 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:44Z","lastTransitionTime":"2026-01-31T05:40:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:44 crc kubenswrapper[4712]: I0131 05:40:44.944748 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-sn2n4" podStartSLOduration=87.944722604 podStartE2EDuration="1m27.944722604s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:44.943825801 +0000 UTC m=+111.037707652" watchObservedRunningTime="2026-01-31 05:40:44.944722604 +0000 UTC m=+111.038604445" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.039533 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.039583 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.039594 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.039612 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.039622 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.142477 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.142529 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.142538 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.142556 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.142567 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.246570 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.246860 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.246930 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.247055 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.247142 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.350262 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.350365 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.350400 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.350439 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.350483 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.454601 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.454668 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.454682 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.454706 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.454726 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.553156 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 11:24:48.879464233 +0000 UTC Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.558333 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.558412 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.558434 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.558464 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.558488 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.662822 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.662903 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.662924 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.662959 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.662983 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
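Has your network provider started?"}

The certificate_manager.go:356 records in this stretch print a fixed expiration (2026-02-24 05:53:03 +0000 UTC) but a rotation deadline that changes on every attempt (2026-01-09 above; 2025-11-11, 2025-12-22 and other values below). client-go's certificate manager jitters the deadline, picking a random point late in the certificate's validity window, and once the chosen deadline is already in the past, as all of these are relative to the Jan 31 log time, it attempts rotation and recomputes. A sketch of that jitter, assuming a 70-90% window fraction and a hypothetical issue time that the log does not show:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in the tail of the certificate's
// validity window (assumed 70-90% here), which is why consecutive
// certificate_manager lines report different deadlines for the same expiry.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiration from the log
	notBefore := notAfter.AddDate(-1, 0, 0)                   // hypothetical issue time, not in the log
	for i := 0; i < 3; i++ {
		fmt.Println(rotationDeadline(notBefore, notAfter)) // a different deadline each call
	}
}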
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.767077 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.767227 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.767256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.767298 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.767328 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.870684 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.871285 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.871304 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.871329 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.871347 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.975586 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.975658 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.975668 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.975685 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:45 crc kubenswrapper[4712]: I0131 05:40:45.975716 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:45Z","lastTransitionTime":"2026-01-31T05:40:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.079055 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.079147 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.079210 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.079251 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.079277 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:46Z","lastTransitionTime":"2026-01-31T05:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.183153 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.183227 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.183237 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.183260 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.183275 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:46Z","lastTransitionTime":"2026-01-31T05:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.287557 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.287637 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.287652 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.287675 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.287689 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:46Z","lastTransitionTime":"2026-01-31T05:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.391072 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.391135 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.391145 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.391170 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.391197 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:46Z","lastTransitionTime":"2026-01-31T05:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.495034 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.495098 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.495113 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.495138 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.495156 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:46Z","lastTransitionTime":"2026-01-31T05:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.503387 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.503430 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.503387 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:46 crc kubenswrapper[4712]: E0131 05:40:46.503544 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.503453 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:46 crc kubenswrapper[4712]: E0131 05:40:46.503693 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:46 crc kubenswrapper[4712]: E0131 05:40:46.503821 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:46 crc kubenswrapper[4712]: E0131 05:40:46.503867 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.554360 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 23:58:28.864740206 +0000 UTC Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.599299 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.599363 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.599377 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.599402 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.599418 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:46Z","lastTransitionTime":"2026-01-31T05:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.703313 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.703827 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.703962 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.704097 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.704232 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:46Z","lastTransitionTime":"2026-01-31T05:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.808006 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.808063 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.808082 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.808108 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.808126 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:46Z","lastTransitionTime":"2026-01-31T05:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.912456 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.912521 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.912539 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.912889 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 31 05:40:46 crc kubenswrapper[4712]: I0131 05:40:46.912927 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:46Z","lastTransitionTime":"2026-01-31T05:40:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.016582 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.016693 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.016714 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.016779 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.016799 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.120479 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.120574 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.120593 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.121154 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.121262 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.225240 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.225323 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.225342 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.225370 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.225393 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.327646 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.327723 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.327743 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.327768 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.327785 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.431483 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.431560 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.431584 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.431791 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.431864 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.535055 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.535133 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.535144 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.535171 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.535228 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.555212 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 14:07:47.113645928 +0000 UTC Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.638492 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.638561 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.638572 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.638589 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.638603 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.741760 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.741826 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.741849 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.741877 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.741899 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.846196 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.846256 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.846268 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.846290 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.846302 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.950027 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.950102 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.950122 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.950152 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:47 crc kubenswrapper[4712]: I0131 05:40:47.950198 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:47Z","lastTransitionTime":"2026-01-31T05:40:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.053316 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.053396 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.053407 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.053430 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.053446 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.157837 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.157924 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.157936 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.157961 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.157975 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.262433 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.262570 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.262589 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.262616 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.262636 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.366451 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.366515 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.366531 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.366558 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.366572 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.469824 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.470292 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.470430 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.470633 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.470782 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.503487 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.503539 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.503626 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:48 crc kubenswrapper[4712]: E0131 05:40:48.503716 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:48 crc kubenswrapper[4712]: E0131 05:40:48.503850 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:48 crc kubenswrapper[4712]: E0131 05:40:48.503949 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.504219 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:48 crc kubenswrapper[4712]: E0131 05:40:48.504449 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.556523 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 12:59:15.694760601 +0000 UTC Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.573936 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.574021 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.574047 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.574082 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.574106 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.677785 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.677842 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.677857 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.677877 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.677893 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.781103 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.781166 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.781191 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.781214 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.781229 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.885127 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.885629 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.885702 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.885795 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.885883 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.989507 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.989581 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.989613 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.989640 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:48 crc kubenswrapper[4712]: I0131 05:40:48.989654 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:48Z","lastTransitionTime":"2026-01-31T05:40:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.093429 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.093518 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.093537 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.093569 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.093590 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:49Z","lastTransitionTime":"2026-01-31T05:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.197708 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.197795 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.197817 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.197849 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.197871 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:49Z","lastTransitionTime":"2026-01-31T05:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.302550 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.302622 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.302646 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.302675 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.302695 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:49Z","lastTransitionTime":"2026-01-31T05:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.406459 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.406546 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.406567 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.406600 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.406625 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:49Z","lastTransitionTime":"2026-01-31T05:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.504354 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8" Jan 31 05:40:49 crc kubenswrapper[4712]: E0131 05:40:49.505130 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.510012 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.510100 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.510123 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.510163 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.510231 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:49Z","lastTransitionTime":"2026-01-31T05:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
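Has your network provider started?"}

The scope.go "RemoveContainer" and pod_workers.go CrashLoopBackOff records above point at the likely reason the CNI config has not appeared: ovnkube-controller in pod ovnkube-node-6r6bn keeps failing, and the kubelet is delaying its restart. Kubelet restart backoff starts at 10s and doubles on each consecutive failure up to a 5-minute cap, so the logged "back-off 40s" corresponds to the third failure in a row. A small sketch of that schedule; the constants are kubelet defaults, not values read from this log:

package main

import (
	"fmt"
	"time"
)

// restartDelay returns the CrashLoopBackOff delay before restart attempt n,
// assuming kubelet defaults: 10s base, doubling, capped at 5 minutes.
func restartDelay(n int) time.Duration {
	d := 10 * time.Second
	for i := 1; i < n; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute
		}
	}
	return d
}

func main() {
	for n := 1; n <= 6; n++ {
		fmt.Printf("failure %d -> back-off %v\n", n, restartDelay(n)) // 10s 20s 40s 1m20s 2m40s 5m0s
	}
}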
Has your network provider started?"} Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.558304 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 00:42:38.052836622 +0000 UTC Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.613775 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.613875 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.613894 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.613926 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.613949 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:49Z","lastTransitionTime":"2026-01-31T05:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.716854 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.716906 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.716919 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.716948 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.716965 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:49Z","lastTransitionTime":"2026-01-31T05:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.787069 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.787135 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.787157 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.787218 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.787236 4712 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-31T05:40:49Z","lastTransitionTime":"2026-01-31T05:40:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.853721 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s"] Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.854415 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.859105 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.859268 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.859347 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.859395 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.905367 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-zbfp7" podStartSLOduration=92.905330607 podStartE2EDuration="1m32.905330607s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:49.904719631 +0000 UTC m=+115.998601472" watchObservedRunningTime="2026-01-31 05:40:49.905330607 +0000 UTC m=+115.999212478" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.954699 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4e627295-0fc9-46f0-b63f-1a23a6178281-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.954849 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4e627295-0fc9-46f0-b63f-1a23a6178281-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.954980 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e627295-0fc9-46f0-b63f-1a23a6178281-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.955010 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4e627295-0fc9-46f0-b63f-1a23a6178281-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.955069 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e627295-0fc9-46f0-b63f-1a23a6178281-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.960298 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-76qlm" podStartSLOduration=91.960266469 podStartE2EDuration="1m31.960266469s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:49.927370511 +0000 UTC m=+116.021252392" watchObservedRunningTime="2026-01-31 05:40:49.960266469 +0000 UTC m=+116.054148310" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.961588 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=93.961578932 podStartE2EDuration="1m33.961578932s" podCreationTimestamp="2026-01-31 05:39:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:49.959981852 +0000 UTC m=+116.053863713" watchObservedRunningTime="2026-01-31 05:40:49.961578932 +0000 UTC m=+116.055460773" Jan 31 05:40:49 crc kubenswrapper[4712]: I0131 05:40:49.999807 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=11.999779133 podStartE2EDuration="11.999779133s" podCreationTimestamp="2026-01-31 05:40:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:49.998080041 +0000 UTC m=+116.091961912" watchObservedRunningTime="2026-01-31 05:40:49.999779133 +0000 UTC m=+116.093660974" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.056334 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/4e627295-0fc9-46f0-b63f-1a23a6178281-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.056566 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/4e627295-0fc9-46f0-b63f-1a23a6178281-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.056886 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4e627295-0fc9-46f0-b63f-1a23a6178281-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.057112 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/4e627295-0fc9-46f0-b63f-1a23a6178281-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.057445 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e627295-0fc9-46f0-b63f-1a23a6178281-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.057540 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4e627295-0fc9-46f0-b63f-1a23a6178281-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.057640 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e627295-0fc9-46f0-b63f-1a23a6178281-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.058879 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4e627295-0fc9-46f0-b63f-1a23a6178281-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.066471 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e627295-0fc9-46f0-b63f-1a23a6178281-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: 
\"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.080362 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e627295-0fc9-46f0-b63f-1a23a6178281-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g6n2s\" (UID: \"4e627295-0fc9-46f0-b63f-1a23a6178281\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.185501 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.503391 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.503474 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.503407 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:50 crc kubenswrapper[4712]: E0131 05:40:50.503654 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.503690 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:50 crc kubenswrapper[4712]: E0131 05:40:50.503844 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:50 crc kubenswrapper[4712]: E0131 05:40:50.503971 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:50 crc kubenswrapper[4712]: E0131 05:40:50.504160 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.559389 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 11:29:28.374213706 +0000 UTC Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.561031 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 31 05:40:50 crc kubenswrapper[4712]: I0131 05:40:50.570118 4712 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 31 05:40:51 crc kubenswrapper[4712]: I0131 05:40:51.095819 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" event={"ID":"4e627295-0fc9-46f0-b63f-1a23a6178281","Type":"ContainerStarted","Data":"aedc37ed89035ea274c6e65e0366453f3ad0c5edf0fe80d187f6dec1c93c05b2"} Jan 31 05:40:51 crc kubenswrapper[4712]: I0131 05:40:51.096100 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" event={"ID":"4e627295-0fc9-46f0-b63f-1a23a6178281","Type":"ContainerStarted","Data":"b426f7c8e69dee9986650ab685f4f289439fb1a454ce8b3166b5c74d3004b5ae"} Jan 31 05:40:51 crc kubenswrapper[4712]: I0131 05:40:51.114979 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g6n2s" podStartSLOduration=94.114952018 podStartE2EDuration="1m34.114952018s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:40:51.113825381 +0000 UTC m=+117.207707232" watchObservedRunningTime="2026-01-31 05:40:51.114952018 +0000 UTC m=+117.208833879" Jan 31 05:40:52 crc kubenswrapper[4712]: I0131 05:40:52.503429 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:52 crc kubenswrapper[4712]: E0131 05:40:52.503578 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:52 crc kubenswrapper[4712]: I0131 05:40:52.503675 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:52 crc kubenswrapper[4712]: I0131 05:40:52.503788 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:52 crc kubenswrapper[4712]: E0131 05:40:52.503875 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:52 crc kubenswrapper[4712]: I0131 05:40:52.503935 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:52 crc kubenswrapper[4712]: E0131 05:40:52.504141 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:52 crc kubenswrapper[4712]: E0131 05:40:52.504302 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:54 crc kubenswrapper[4712]: E0131 05:40:54.484602 4712 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 31 05:40:54 crc kubenswrapper[4712]: I0131 05:40:54.503255 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:54 crc kubenswrapper[4712]: I0131 05:40:54.503278 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:54 crc kubenswrapper[4712]: I0131 05:40:54.503395 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:54 crc kubenswrapper[4712]: E0131 05:40:54.504131 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:54 crc kubenswrapper[4712]: I0131 05:40:54.504154 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:54 crc kubenswrapper[4712]: E0131 05:40:54.504354 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:54 crc kubenswrapper[4712]: E0131 05:40:54.504503 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:54 crc kubenswrapper[4712]: E0131 05:40:54.504645 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:54 crc kubenswrapper[4712]: E0131 05:40:54.623195 4712 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 31 05:40:55 crc kubenswrapper[4712]: I0131 05:40:55.113929 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/1.log" Jan 31 05:40:55 crc kubenswrapper[4712]: I0131 05:40:55.114741 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/0.log" Jan 31 05:40:55 crc kubenswrapper[4712]: I0131 05:40:55.114830 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4943935-d884-4777-b679-bfabc7235a23" containerID="1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6" exitCode=1 Jan 31 05:40:55 crc kubenswrapper[4712]: I0131 05:40:55.114888 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zbfp7" event={"ID":"f4943935-d884-4777-b679-bfabc7235a23","Type":"ContainerDied","Data":"1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6"} Jan 31 05:40:55 crc kubenswrapper[4712]: I0131 05:40:55.114952 4712 scope.go:117] "RemoveContainer" containerID="0574a7a5518ecadfd26c9ffed7c6c1d4e4e2a69508efbe28ac2dbeb52e29cd25" Jan 31 05:40:55 crc kubenswrapper[4712]: I0131 05:40:55.115808 4712 scope.go:117] "RemoveContainer" containerID="1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6" Jan 31 05:40:55 crc kubenswrapper[4712]: E0131 05:40:55.116231 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-zbfp7_openshift-multus(f4943935-d884-4777-b679-bfabc7235a23)\"" pod="openshift-multus/multus-zbfp7" podUID="f4943935-d884-4777-b679-bfabc7235a23" Jan 31 05:40:56 crc kubenswrapper[4712]: I0131 05:40:56.121338 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/1.log" Jan 31 05:40:56 crc kubenswrapper[4712]: I0131 05:40:56.503485 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:56 crc kubenswrapper[4712]: I0131 05:40:56.503602 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:56 crc kubenswrapper[4712]: I0131 05:40:56.503629 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:56 crc kubenswrapper[4712]: E0131 05:40:56.503599 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:56 crc kubenswrapper[4712]: E0131 05:40:56.503665 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:56 crc kubenswrapper[4712]: I0131 05:40:56.503689 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:56 crc kubenswrapper[4712]: E0131 05:40:56.503711 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:56 crc kubenswrapper[4712]: E0131 05:40:56.503826 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:58 crc kubenswrapper[4712]: I0131 05:40:58.503286 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:40:58 crc kubenswrapper[4712]: I0131 05:40:58.503336 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:40:58 crc kubenswrapper[4712]: E0131 05:40:58.503469 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:40:58 crc kubenswrapper[4712]: I0131 05:40:58.503478 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:40:58 crc kubenswrapper[4712]: I0131 05:40:58.503603 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:40:58 crc kubenswrapper[4712]: E0131 05:40:58.503645 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:40:58 crc kubenswrapper[4712]: E0131 05:40:58.503690 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:40:58 crc kubenswrapper[4712]: E0131 05:40:58.504100 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:40:59 crc kubenswrapper[4712]: E0131 05:40:59.625034 4712 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 31 05:41:00 crc kubenswrapper[4712]: I0131 05:41:00.503070 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:00 crc kubenswrapper[4712]: E0131 05:41:00.503340 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:41:00 crc kubenswrapper[4712]: I0131 05:41:00.503395 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:00 crc kubenswrapper[4712]: I0131 05:41:00.503432 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:00 crc kubenswrapper[4712]: I0131 05:41:00.503458 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:00 crc kubenswrapper[4712]: E0131 05:41:00.503902 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:41:00 crc kubenswrapper[4712]: E0131 05:41:00.504028 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:41:00 crc kubenswrapper[4712]: E0131 05:41:00.504101 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:41:00 crc kubenswrapper[4712]: I0131 05:41:00.504453 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8" Jan 31 05:41:00 crc kubenswrapper[4712]: E0131 05:41:00.504680 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6r6bn_openshift-ovn-kubernetes(2f522e2e-c0c8-44a7-b834-ac367dba0c9c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" Jan 31 05:41:02 crc kubenswrapper[4712]: I0131 05:41:02.503545 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:02 crc kubenswrapper[4712]: I0131 05:41:02.503545 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:02 crc kubenswrapper[4712]: E0131 05:41:02.503788 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:41:02 crc kubenswrapper[4712]: I0131 05:41:02.503568 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:02 crc kubenswrapper[4712]: E0131 05:41:02.503876 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:41:02 crc kubenswrapper[4712]: E0131 05:41:02.503699 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:41:02 crc kubenswrapper[4712]: I0131 05:41:02.503550 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:02 crc kubenswrapper[4712]: E0131 05:41:02.504009 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:41:04 crc kubenswrapper[4712]: I0131 05:41:04.503513 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:04 crc kubenswrapper[4712]: I0131 05:41:04.503613 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:04 crc kubenswrapper[4712]: I0131 05:41:04.503613 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:04 crc kubenswrapper[4712]: I0131 05:41:04.503640 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:04 crc kubenswrapper[4712]: E0131 05:41:04.503720 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:41:04 crc kubenswrapper[4712]: E0131 05:41:04.505773 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:41:04 crc kubenswrapper[4712]: E0131 05:41:04.505902 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:41:04 crc kubenswrapper[4712]: E0131 05:41:04.505991 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:41:04 crc kubenswrapper[4712]: E0131 05:41:04.625708 4712 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 31 05:41:06 crc kubenswrapper[4712]: I0131 05:41:06.504116 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:06 crc kubenswrapper[4712]: I0131 05:41:06.504503 4712 scope.go:117] "RemoveContainer" containerID="1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6" Jan 31 05:41:06 crc kubenswrapper[4712]: I0131 05:41:06.504545 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:06 crc kubenswrapper[4712]: E0131 05:41:06.504737 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:41:06 crc kubenswrapper[4712]: I0131 05:41:06.504345 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:06 crc kubenswrapper[4712]: I0131 05:41:06.504378 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:06 crc kubenswrapper[4712]: E0131 05:41:06.507158 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:41:06 crc kubenswrapper[4712]: E0131 05:41:06.507265 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:41:06 crc kubenswrapper[4712]: E0131 05:41:06.508083 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:41:07 crc kubenswrapper[4712]: I0131 05:41:07.169150 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/1.log" Jan 31 05:41:07 crc kubenswrapper[4712]: I0131 05:41:07.169518 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zbfp7" event={"ID":"f4943935-d884-4777-b679-bfabc7235a23","Type":"ContainerStarted","Data":"d90ab44100fbd461ca9ec5eb5b37bded0b506688933f0dd115ab5f7d8779ceab"} Jan 31 05:41:08 crc kubenswrapper[4712]: I0131 05:41:08.503067 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:08 crc kubenswrapper[4712]: I0131 05:41:08.503089 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:08 crc kubenswrapper[4712]: I0131 05:41:08.503135 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:08 crc kubenswrapper[4712]: E0131 05:41:08.504421 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:41:08 crc kubenswrapper[4712]: E0131 05:41:08.504269 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:41:08 crc kubenswrapper[4712]: I0131 05:41:08.503232 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:08 crc kubenswrapper[4712]: E0131 05:41:08.504698 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:41:08 crc kubenswrapper[4712]: E0131 05:41:08.504782 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:41:09 crc kubenswrapper[4712]: E0131 05:41:09.627127 4712 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 31 05:41:10 crc kubenswrapper[4712]: I0131 05:41:10.503752 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:10 crc kubenswrapper[4712]: I0131 05:41:10.503869 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:10 crc kubenswrapper[4712]: I0131 05:41:10.503940 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:10 crc kubenswrapper[4712]: I0131 05:41:10.504097 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:10 crc kubenswrapper[4712]: E0131 05:41:10.504806 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:41:10 crc kubenswrapper[4712]: E0131 05:41:10.504938 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:41:10 crc kubenswrapper[4712]: E0131 05:41:10.505123 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:41:10 crc kubenswrapper[4712]: E0131 05:41:10.505317 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:41:12 crc kubenswrapper[4712]: I0131 05:41:12.503052 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:12 crc kubenswrapper[4712]: I0131 05:41:12.503053 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:12 crc kubenswrapper[4712]: E0131 05:41:12.503992 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:41:12 crc kubenswrapper[4712]: I0131 05:41:12.503217 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:12 crc kubenswrapper[4712]: I0131 05:41:12.503121 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:12 crc kubenswrapper[4712]: E0131 05:41:12.504358 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:41:12 crc kubenswrapper[4712]: E0131 05:41:12.504553 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:41:12 crc kubenswrapper[4712]: E0131 05:41:12.504714 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:41:14 crc kubenswrapper[4712]: I0131 05:41:14.514657 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:14 crc kubenswrapper[4712]: I0131 05:41:14.514736 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:14 crc kubenswrapper[4712]: I0131 05:41:14.514739 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:14 crc kubenswrapper[4712]: I0131 05:41:14.515102 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:14 crc kubenswrapper[4712]: E0131 05:41:14.517686 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 31 05:41:14 crc kubenswrapper[4712]: E0131 05:41:14.517899 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956" Jan 31 05:41:14 crc kubenswrapper[4712]: E0131 05:41:14.517763 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 31 05:41:14 crc kubenswrapper[4712]: E0131 05:41:14.517824 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 31 05:41:14 crc kubenswrapper[4712]: E0131 05:41:14.629241 4712 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Jan 31 05:41:15 crc kubenswrapper[4712]: I0131 05:41:15.503948 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"
Jan 31 05:41:16 crc kubenswrapper[4712]: I0131 05:41:16.201404 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/3.log"
Jan 31 05:41:16 crc kubenswrapper[4712]: I0131 05:41:16.203667 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerStarted","Data":"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"}
Jan 31 05:41:16 crc kubenswrapper[4712]: I0131 05:41:16.204002 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:41:16 crc kubenswrapper[4712]: I0131 05:41:16.231431 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podStartSLOduration=118.231417039 podStartE2EDuration="1m58.231417039s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:16.230078154 +0000 UTC m=+142.323959995" watchObservedRunningTime="2026-01-31 05:41:16.231417039 +0000 UTC m=+142.325298870"
Jan 31 05:41:16 crc kubenswrapper[4712]: I0131 05:41:16.482475 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-5svzb"]
Jan 31 05:41:16 crc kubenswrapper[4712]: I0131 05:41:16.483085 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:41:16 crc kubenswrapper[4712]: E0131 05:41:16.483297 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:41:16 crc kubenswrapper[4712]: I0131 05:41:16.504093 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:41:16 crc kubenswrapper[4712]: I0131 05:41:16.504197 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:41:16 crc kubenswrapper[4712]: I0131 05:41:16.504093 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:41:16 crc kubenswrapper[4712]: E0131 05:41:16.504360 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:41:16 crc kubenswrapper[4712]: E0131 05:41:16.504478 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:41:16 crc kubenswrapper[4712]: E0131 05:41:16.504620 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:41:18 crc kubenswrapper[4712]: I0131 05:41:18.503569 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:41:18 crc kubenswrapper[4712]: I0131 05:41:18.503674 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:41:18 crc kubenswrapper[4712]: I0131 05:41:18.503697 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:41:18 crc kubenswrapper[4712]: E0131 05:41:18.503814 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5svzb" podUID="03cf41cd-8606-4e98-a290-023fbe7d0956"
Jan 31 05:41:18 crc kubenswrapper[4712]: I0131 05:41:18.503854 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:41:18 crc kubenswrapper[4712]: E0131 05:41:18.503966 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 31 05:41:18 crc kubenswrapper[4712]: E0131 05:41:18.504439 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 31 05:41:18 crc kubenswrapper[4712]: E0131 05:41:18.504523 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.315555 4712 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.366625 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.367463 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.369651 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-h8q6s"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.370717 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.371675 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.371919 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.371979 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mvft"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.372312 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.372463 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.373655 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.373880 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.374303 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.375030 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.375777 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.381670 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.381836 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.383232 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.385874 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.386655 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.386957 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.387792 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.390114 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-6ctqs"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.398725 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.401564 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.402341 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.403539 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.403465 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.404274 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.404587 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.404622 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.404649 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.404694 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.404654 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.405230 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.405361 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.406008 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.406091 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.421086 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.422283 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.422375 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.422430 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.422584 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.424305 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zsktt"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.424955 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-8qb8j"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.425424 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.425590 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.425426 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a9dd2b5c-56c2-430b-9369-7013692ec42d-etcd-client\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.425930 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426038 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426110 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9dd2b5c-56c2-430b-9369-7013692ec42d-serving-cert\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426194 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gwcw\" (UniqueName: \"kubernetes.io/projected/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-kube-api-access-8gwcw\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426226 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426244 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a9dd2b5c-56c2-430b-9369-7013692ec42d-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426290 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1403369e-fea9-4c82-9432-af21d937566c-serving-cert\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426606 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9dd2b5c-56c2-430b-9369-7013692ec42d-audit-dir\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426669 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-serving-cert\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426718 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rgl5\" (UniqueName: \"kubernetes.io/projected/fbea73a1-2703-4775-9f42-8c8340e76f46-kube-api-access-7rgl5\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426744 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-service-ca-bundle\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426777 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9dd2b5c-56c2-430b-9369-7013692ec42d-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426925 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n6k8\" (UniqueName: \"kubernetes.io/projected/a9dd2b5c-56c2-430b-9369-7013692ec42d-kube-api-access-4n6k8\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.426975 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9dd2b5c-56c2-430b-9369-7013692ec42d-audit-policies\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427015 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1403369e-fea9-4c82-9432-af21d937566c-trusted-ca\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427043 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a9dd2b5c-56c2-430b-9369-7013692ec42d-encryption-config\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427084 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmkws\" (UniqueName: \"kubernetes.io/projected/1403369e-fea9-4c82-9432-af21d937566c-kube-api-access-jmkws\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427125 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1403369e-fea9-4c82-9432-af21d937566c-config\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427158 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbea73a1-2703-4775-9f42-8c8340e76f46-serving-cert\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427225 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df97f9bb-69d0-449f-87a8-39eac97fea87-config\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427274 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-config\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427335 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/df97f9bb-69d0-449f-87a8-39eac97fea87-auth-proxy-config\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427459 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st8b2\" (UniqueName: \"kubernetes.io/projected/df97f9bb-69d0-449f-87a8-39eac97fea87-kube-api-access-st8b2\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427507 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-config\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427541 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-client-ca\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.427568 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/df97f9bb-69d0-449f-87a8-39eac97fea87-machine-approver-tls\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.429331 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.429683 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.432081 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.433436 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.433909 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.434801 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.434931 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.435062 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.435227 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.435372 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.435507 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.435236 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.434814 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.435690 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.445606 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.447153 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.447531 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.447744 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.447759 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.447840 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.447937 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.448027 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.448074 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.448094 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.448148 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.448209 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.448275 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.448032 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.448318 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.448384 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.447601 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.449865 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.449942 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.450341 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.450432 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.450568 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.450734 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.450892 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.450898 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.451002 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.451108 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.451344 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.451481 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.451711 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.452564 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.474221 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6xfbd"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.475098 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nlhkt"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.475855 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.478873 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.479457 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.480506 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.480879 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.481316 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.481790 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-x49kq"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.483917 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.484025 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.484479 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.485124 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.505328 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.506276 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.507269 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-s5vdj"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.507640 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-bdt97"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.507934 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.508733 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-x49kq"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.508911 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-s5vdj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.510542 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.511131 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.514448 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.508641 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.516145 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.516524 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.516605 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.516783 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.516950 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.517768 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.518083 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.518135 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.518501 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.519630 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-rhq9p"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.520628 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-rhq9p"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.520734 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.521263 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.522693 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7"]
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.523987 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533386 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-config\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533431 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1403369e-fea9-4c82-9432-af21d937566c-config\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533455 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6cc9\" (UniqueName: \"kubernetes.io/projected/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-kube-api-access-f6cc9\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533476 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/e6a5dfee-6def-4a85-9e8b-854f91517c58-default-certificate\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533501 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbea73a1-2703-4775-9f42-8c8340e76f46-serving-cert\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533518 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71d0663f-6a96-43ff-91fe-25bf58eb996e-config\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533537 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6dce9696-4500-4073-86f7-479ca63279bc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533557 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df97f9bb-69d0-449f-87a8-39eac97fea87-config\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533574 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-trusted-ca-bundle\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533591 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6a5dfee-6def-4a85-9e8b-854f91517c58-metrics-certs\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533607 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6dce9696-4500-4073-86f7-479ca63279bc-trusted-ca\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533633 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/71d0663f-6a96-43ff-91fe-25bf58eb996e-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533650 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-config\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533668 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/df97f9bb-69d0-449f-87a8-39eac97fea87-auth-proxy-config\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533685 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n5r8\" (UniqueName: \"kubernetes.io/projected/549f7a4d-8fba-47e2-8b51-bb660fe413b4-kube-api-access-8n5r8\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533700 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-serving-cert\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533714 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8773f474-ef91-4f45-8461-3a991e6b45ee-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2xf7t\" (UID: \"8773f474-ef91-4f45-8461-3a991e6b45ee\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533731 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/71d0663f-6a96-43ff-91fe-25bf58eb996e-images\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533750 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st8b2\" (UniqueName: \"kubernetes.io/projected/df97f9bb-69d0-449f-87a8-39eac97fea87-kube-api-access-st8b2\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533770 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-config\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533785 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kd9ct\" (UniqueName: \"kubernetes.io/projected/e6a5dfee-6def-4a85-9e8b-854f91517c58-kube-api-access-kd9ct\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533802 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-client-ca\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533816 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/df97f9bb-69d0-449f-87a8-39eac97fea87-machine-approver-tls\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533833 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a9dd2b5c-56c2-430b-9369-7013692ec42d-etcd-client\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533850 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533903 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9dd2b5c-56c2-430b-9369-7013692ec42d-serving-cert\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533967 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-oauth-config\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.533986 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/e6a5dfee-6def-4a85-9e8b-854f91517c58-stats-auth\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534002 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxlrp\" (UniqueName: \"kubernetes.io/projected/8773f474-ef91-4f45-8461-3a991e6b45ee-kube-api-access-bxlrp\") pod \"openshift-controller-manager-operator-756b6f6bc6-2xf7t\" (UID: \"8773f474-ef91-4f45-8461-3a991e6b45ee\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534023 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gwcw\" (UniqueName: \"kubernetes.io/projected/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-kube-api-access-8gwcw\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534038 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md8qr\" (UniqueName: \"kubernetes.io/projected/71d0663f-6a96-43ff-91fe-25bf58eb996e-kube-api-access-md8qr\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534053 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-oauth-serving-cert\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534069 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp4dx\" (UniqueName: \"kubernetes.io/projected/361fda8f-a29d-4f1c-8356-33d8d94ec967-kube-api-access-jp4dx\") pod \"dns-operator-744455d44c-x49kq\" (UID: \"361fda8f-a29d-4f1c-8356-33d8d94ec967\") " pod="openshift-dns-operator/dns-operator-744455d44c-x49kq"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534087 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8773f474-ef91-4f45-8461-3a991e6b45ee-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2xf7t\" (UID: \"8773f474-ef91-4f45-8461-3a991e6b45ee\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534108 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a9dd2b5c-56c2-430b-9369-7013692ec42d-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534133 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1403369e-fea9-4c82-9432-af21d937566c-serving-cert\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534152 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9dd2b5c-56c2-430b-9369-7013692ec42d-audit-dir\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534186 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-config\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534205 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-service-ca\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534225 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d7c4b8ae-585b-4c46-9ed4-e992203bd3d7-available-featuregates\") pod \"openshift-config-operator-7777fb866f-lmtl8\" (UID: \"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534245 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6a5dfee-6def-4a85-9e8b-854f91517c58-service-ca-bundle\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534261 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/361fda8f-a29d-4f1c-8356-33d8d94ec967-metrics-tls\") pod \"dns-operator-744455d44c-x49kq\" (UID: \"361fda8f-a29d-4f1c-8356-33d8d94ec967\") " pod="openshift-dns-operator/dns-operator-744455d44c-x49kq"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534290 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-serving-cert\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534323 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rgl5\" (UniqueName: \"kubernetes.io/projected/fbea73a1-2703-4775-9f42-8c8340e76f46-kube-api-access-7rgl5\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534345 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-service-ca-bundle\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534366 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9dd2b5c-56c2-430b-9369-7013692ec42d-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534384 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534402 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8cbt\" (UniqueName: \"kubernetes.io/projected/930eb788-1c1a-41e5-8989-7fbbf25c5da0-kube-api-access-w8cbt\") pod \"downloads-7954f5f757-s5vdj\" (UID: \"930eb788-1c1a-41e5-8989-7fbbf25c5da0\") " pod="openshift-console/downloads-7954f5f757-s5vdj"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534418 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6dce9696-4500-4073-86f7-479ca63279bc-metrics-tls\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534438 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n6k8\" (UniqueName: \"kubernetes.io/projected/a9dd2b5c-56c2-430b-9369-7013692ec42d-kube-api-access-4n6k8\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534455 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7c4b8ae-585b-4c46-9ed4-e992203bd3d7-serving-cert\") pod \"openshift-config-operator-7777fb866f-lmtl8\" (UID: \"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534473 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9dd2b5c-56c2-430b-9369-7013692ec42d-audit-policies\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534489 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1403369e-fea9-4c82-9432-af21d937566c-trusted-ca\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534526 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a9dd2b5c-56c2-430b-9369-7013692ec42d-encryption-config\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534546 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-client-ca\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534564 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdthv\" (UniqueName: \"kubernetes.io/projected/d7c4b8ae-585b-4c46-9ed4-e992203bd3d7-kube-api-access-pdthv\") pod \"openshift-config-operator-7777fb866f-lmtl8\" (UID: \"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534583 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmkws\" (UniqueName: \"kubernetes.io/projected/1403369e-fea9-4c82-9432-af21d937566c-kube-api-access-jmkws\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534599 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/549f7a4d-8fba-47e2-8b51-bb660fe413b4-serving-cert\") pod \"controller-manager-879f6c89f-7mvft\" (UID:
\"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.534619 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brf2h\" (UniqueName: \"kubernetes.io/projected/6dce9696-4500-4073-86f7-479ca63279bc-kube-api-access-brf2h\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.535315 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-config\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.535399 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/df97f9bb-69d0-449f-87a8-39eac97fea87-auth-proxy-config\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.535456 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9dd2b5c-56c2-430b-9369-7013692ec42d-audit-dir\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536087 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536151 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1403369e-fea9-4c82-9432-af21d937566c-config\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536228 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536289 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536358 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536380 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536416 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536506 4712 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536547 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536567 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536381 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536720 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536790 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536838 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.536939 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.537053 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.537411 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df97f9bb-69d0-449f-87a8-39eac97fea87-config\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.537618 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.537818 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.537976 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.538550 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.539850 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.540420 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-config\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg" Jan 31 05:41:20 crc 
kubenswrapper[4712]: I0131 05:41:20.540755 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.541047 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-client-ca\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.542032 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a9dd2b5c-56c2-430b-9369-7013692ec42d-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.537619 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.542561 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-service-ca-bundle\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.542966 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9dd2b5c-56c2-430b-9369-7013692ec42d-audit-policies\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.543368 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1403369e-fea9-4c82-9432-af21d937566c-trusted-ca\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.544078 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.544353 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9dd2b5c-56c2-430b-9369-7013692ec42d-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.546955 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
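The entries above trace the kubelet's volume reconciler lifecycle: for each volume a scheduled pod references, reconciler_common.go first logs "operationExecutor.VerifyControllerAttachedVolume started" or "operationExecutor.MountVolume started", and operation_generator.go later logs "MountVolume.SetUp succeeded" once the mount completes. A minimal Go sketch of one way to check a capture like this for mounts that started but never logged success follows; it is an illustrative helper, not kubelet code, and it assumes one journal entry per line and the exact message shapes quoted above (note the inner quotes are backslash-escaped in the raw log, so the patterns match \" literally).

package main

// Illustrative log-analysis sketch (not part of the kubelet): report
// volumes whose "MountVolume started" entry has no matching
// "MountVolume.SetUp succeeded" entry.

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

var (
	mountStarted = regexp.MustCompile(`operationExecutor\.MountVolume started for volume \\"([^"]+)\\".*pod="([^"]+)"`)
	setupDone    = regexp.MustCompile(`MountVolume\.SetUp succeeded for volume \\"([^"]+)\\".*pod="([^"]+)"`)
)

func main() {
	pending := map[string]bool{} // key: pod + "/" + volume
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 64*1024), 1024*1024) // entries can be long
	for sc.Scan() {
		line := sc.Text()
		if m := mountStarted.FindStringSubmatch(line); m != nil {
			pending[m[2]+"/"+m[1]] = true
		} else if m := setupDone.FindStringSubmatch(line); m != nil {
			delete(pending, m[2]+"/"+m[1])
		}
	}
	for key := range pending {
		fmt.Println("mount started but no SetUp success logged:", key)
	}
}

Piping the log through it (for example, go run checkmounts.go < kubelet.log, with a hypothetical file name) would list any pod/volume pair still waiting on its mount at the end of the capture.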
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.547332 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.548185 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.550252 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.551736 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.552079 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1403369e-fea9-4c82-9432-af21d937566c-serving-cert\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.553235 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbea73a1-2703-4775-9f42-8c8340e76f46-serving-cert\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.559388 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a9dd2b5c-56c2-430b-9369-7013692ec42d-etcd-client\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.559677 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9dd2b5c-56c2-430b-9369-7013692ec42d-serving-cert\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.559864 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-serving-cert\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.561678 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.563183 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.563993 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.564406 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.565397 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.570924 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.573108 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.575010 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.576111 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/df97f9bb-69d0-449f-87a8-39eac97fea87-machine-approver-tls\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.576794 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.578719 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.585257 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.586542 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.593355 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.593974 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.594346 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hc54l"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.595095 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.595601 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.599781 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.600672 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.606415 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.607118 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.607142 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-h8q6s"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.607277 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.608540 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.608997 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-n5lvh"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.609500 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.609726 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.609887 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-4kvm8"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.610822 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-4kvm8" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.613042 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.613248 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.613956 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zkv2d"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.614488 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.614762 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.620433 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.621109 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6xfbd"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.622842 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-bdt97"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.624421 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.630961 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.631069 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-6ctqs"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.631125 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-s5vdj"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.631138 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zsktt"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.634789 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.635319 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.636894 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8773f474-ef91-4f45-8461-3a991e6b45ee-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2xf7t\" (UID: \"8773f474-ef91-4f45-8461-3a991e6b45ee\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.636984 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-config\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637037 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-service-ca\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637075 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637076 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d7c4b8ae-585b-4c46-9ed4-e992203bd3d7-available-featuregates\") pod \"openshift-config-operator-7777fb866f-lmtl8\" (UID: \"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637214 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6a5dfee-6def-4a85-9e8b-854f91517c58-service-ca-bundle\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637250 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/361fda8f-a29d-4f1c-8356-33d8d94ec967-metrics-tls\") pod \"dns-operator-744455d44c-x49kq\" (UID: \"361fda8f-a29d-4f1c-8356-33d8d94ec967\") " pod="openshift-dns-operator/dns-operator-744455d44c-x49kq" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637325 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637344 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8cbt\" (UniqueName: \"kubernetes.io/projected/930eb788-1c1a-41e5-8989-7fbbf25c5da0-kube-api-access-w8cbt\") pod \"downloads-7954f5f757-s5vdj\" (UID: \"930eb788-1c1a-41e5-8989-7fbbf25c5da0\") " pod="openshift-console/downloads-7954f5f757-s5vdj" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637366 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6dce9696-4500-4073-86f7-479ca63279bc-metrics-tls\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637398 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7c4b8ae-585b-4c46-9ed4-e992203bd3d7-serving-cert\") pod \"openshift-config-operator-7777fb866f-lmtl8\" (UID: \"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637422 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdthv\" (UniqueName: \"kubernetes.io/projected/d7c4b8ae-585b-4c46-9ed4-e992203bd3d7-kube-api-access-pdthv\") pod \"openshift-config-operator-7777fb866f-lmtl8\" (UID: \"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637448 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-client-ca\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637475 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/549f7a4d-8fba-47e2-8b51-bb660fe413b4-serving-cert\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637491 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brf2h\" (UniqueName: \"kubernetes.io/projected/6dce9696-4500-4073-86f7-479ca63279bc-kube-api-access-brf2h\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637510 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-config\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637536 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6cc9\" (UniqueName: \"kubernetes.io/projected/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-kube-api-access-f6cc9\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637555 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/e6a5dfee-6def-4a85-9e8b-854f91517c58-default-certificate\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637574 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71d0663f-6a96-43ff-91fe-25bf58eb996e-config\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637600 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6dce9696-4500-4073-86f7-479ca63279bc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637625 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-trusted-ca-bundle\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " 
pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637643 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6a5dfee-6def-4a85-9e8b-854f91517c58-metrics-certs\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637659 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6dce9696-4500-4073-86f7-479ca63279bc-trusted-ca\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637661 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d7c4b8ae-585b-4c46-9ed4-e992203bd3d7-available-featuregates\") pod \"openshift-config-operator-7777fb866f-lmtl8\" (UID: \"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637682 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/71d0663f-6a96-43ff-91fe-25bf58eb996e-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637802 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8773f474-ef91-4f45-8461-3a991e6b45ee-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2xf7t\" (UID: \"8773f474-ef91-4f45-8461-3a991e6b45ee\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637855 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n5r8\" (UniqueName: \"kubernetes.io/projected/549f7a4d-8fba-47e2-8b51-bb660fe413b4-kube-api-access-8n5r8\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637880 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-serving-cert\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637901 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/71d0663f-6a96-43ff-91fe-25bf58eb996e-images\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637949 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-kd9ct\" (UniqueName: \"kubernetes.io/projected/e6a5dfee-6def-4a85-9e8b-854f91517c58-kube-api-access-kd9ct\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.637977 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-oauth-config\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.638017 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/e6a5dfee-6def-4a85-9e8b-854f91517c58-stats-auth\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.638038 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxlrp\" (UniqueName: \"kubernetes.io/projected/8773f474-ef91-4f45-8461-3a991e6b45ee-kube-api-access-bxlrp\") pod \"openshift-controller-manager-operator-756b6f6bc6-2xf7t\" (UID: \"8773f474-ef91-4f45-8461-3a991e6b45ee\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.638061 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-oauth-serving-cert\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.638107 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp4dx\" (UniqueName: \"kubernetes.io/projected/361fda8f-a29d-4f1c-8356-33d8d94ec967-kube-api-access-jp4dx\") pod \"dns-operator-744455d44c-x49kq\" (UID: \"361fda8f-a29d-4f1c-8356-33d8d94ec967\") " pod="openshift-dns-operator/dns-operator-744455d44c-x49kq" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.638145 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md8qr\" (UniqueName: \"kubernetes.io/projected/71d0663f-6a96-43ff-91fe-25bf58eb996e-kube-api-access-md8qr\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.639162 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8773f474-ef91-4f45-8461-3a991e6b45ee-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2xf7t\" (UID: \"8773f474-ef91-4f45-8461-3a991e6b45ee\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.639580 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8773f474-ef91-4f45-8461-3a991e6b45ee-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2xf7t\" (UID: 
\"8773f474-ef91-4f45-8461-3a991e6b45ee\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.640196 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.640493 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-config\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.640642 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71d0663f-6a96-43ff-91fe-25bf58eb996e-config\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.641114 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.641220 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-client-ca\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.641778 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-oauth-serving-cert\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.642321 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-config\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.642378 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.643074 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/71d0663f-6a96-43ff-91fe-25bf58eb996e-images\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.643206 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.643802 4712 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-service-ca\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.643997 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-trusted-ca-bundle\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.645365 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.645642 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-oauth-config\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.645801 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/549f7a4d-8fba-47e2-8b51-bb660fe413b4-serving-cert\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.648067 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7c4b8ae-585b-4c46-9ed4-e992203bd3d7-serving-cert\") pod \"openshift-config-operator-7777fb866f-lmtl8\" (UID: \"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.648232 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.649212 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.650017 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.650238 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-x49kq"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.651268 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/361fda8f-a29d-4f1c-8356-33d8d94ec967-metrics-tls\") pod \"dns-operator-744455d44c-x49kq\" (UID: \"361fda8f-a29d-4f1c-8356-33d8d94ec967\") " pod="openshift-dns-operator/dns-operator-744455d44c-x49kq" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.651665 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.652120 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/71d0663f-6a96-43ff-91fe-25bf58eb996e-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.653037 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.653572 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.654372 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-serving-cert\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.654639 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.655548 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hc54l"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.656512 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.657518 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nlhkt"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.658475 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.660162 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-xgchj"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.661136 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xgchj" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.661296 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.662363 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9kwv8"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.665443 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8qb8j"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.665681 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.665784 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mvft"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.665809 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.669462 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-n5lvh"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.671222 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.681042 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zkv2d"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.684507 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.686504 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.686823 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xgchj"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.688041 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9kwv8"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.689160 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-ldwpn"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.689601 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.690088 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-ldwpn" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.690295 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-ldwpn"] Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.709832 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.730608 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.749798 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.770525 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.790935 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.810478 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.830916 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.849887 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.870638 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.889668 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.910966 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.930607 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.949987 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.971194 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 31 05:41:20 crc kubenswrapper[4712]: I0131 05:41:20.990503 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.011054 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.031666 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.050977 4712 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.070441 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.089955 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.109373 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.111343 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6a5dfee-6def-4a85-9e8b-854f91517c58-service-ca-bundle\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.130640 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.143910 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/e6a5dfee-6def-4a85-9e8b-854f91517c58-stats-auth\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.150413 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.162389 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6a5dfee-6def-4a85-9e8b-854f91517c58-metrics-certs\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.169869 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.189395 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.194651 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/e6a5dfee-6def-4a85-9e8b-854f91517c58-default-certificate\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.210092 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.230931 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.250009 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.269635 4712 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.289292 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.309892 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.336999 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.341743 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6dce9696-4500-4073-86f7-479ca63279bc-trusted-ca\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.349783 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.369888 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.373718 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6dce9696-4500-4073-86f7-479ca63279bc-metrics-tls\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.390809 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.410678 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.479669 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st8b2\" (UniqueName: \"kubernetes.io/projected/df97f9bb-69d0-449f-87a8-39eac97fea87-kube-api-access-st8b2\") pod \"machine-approver-56656f9798-c48hj\" (UID: \"df97f9bb-69d0-449f-87a8-39eac97fea87\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.490588 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gwcw\" (UniqueName: \"kubernetes.io/projected/2d94d9e7-57bf-4e16-8587-5739a7dfb0a2-kube-api-access-8gwcw\") pod \"authentication-operator-69f744f599-rzdlg\" (UID: \"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.506271 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rgl5\" (UniqueName: \"kubernetes.io/projected/fbea73a1-2703-4775-9f42-8c8340e76f46-kube-api-access-7rgl5\") pod \"route-controller-manager-6576b87f9c-gn8v6\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.528988 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n6k8\" (UniqueName: \"kubernetes.io/projected/a9dd2b5c-56c2-430b-9369-7013692ec42d-kube-api-access-4n6k8\") pod \"apiserver-7bbb656c7d-klknc\" (UID: \"a9dd2b5c-56c2-430b-9369-7013692ec42d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.546186 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmkws\" (UniqueName: \"kubernetes.io/projected/1403369e-fea9-4c82-9432-af21d937566c-kube-api-access-jmkws\") pod \"console-operator-58897d9998-6ctqs\" (UID: \"1403369e-fea9-4c82-9432-af21d937566c\") " pod="openshift-console-operator/console-operator-58897d9998-6ctqs" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.548071 4712 request.go:700] Waited for 1.004028914s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-879f6c89f-7mvft Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.571738 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.591039 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.610120 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.622632 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.630461 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.651970 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.670917 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.679958 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.691131 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.711157 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.728918 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.736713 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.739897 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.750475 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.771230 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.791865 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.810839 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.811891 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-6ctqs" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.830325 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.849940 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.870215 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.876100 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"] Jan 31 05:41:21 crc kubenswrapper[4712]: W0131 05:41:21.887676 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbea73a1_2703_4775_9f42_8c8340e76f46.slice/crio-04e652c086148c2da7505f775cf86074dce681ee123a160831af4e9fb73efb70 WatchSource:0}: Error finding container 04e652c086148c2da7505f775cf86074dce681ee123a160831af4e9fb73efb70: Status 404 returned error can't find the container with id 04e652c086148c2da7505f775cf86074dce681ee123a160831af4e9fb73efb70 Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.890487 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.911059 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.922368 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"] Jan 31 05:41:21 crc kubenswrapper[4712]: W0131 05:41:21.925935 4712 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9dd2b5c_56c2_430b_9369_7013692ec42d.slice/crio-2a6c953d000025a88d9105411afdbdc9291b7e4bfa8f13685c0bfc660024a846 WatchSource:0}: Error finding container 2a6c953d000025a88d9105411afdbdc9291b7e4bfa8f13685c0bfc660024a846: Status 404 returned error can't find the container with id 2a6c953d000025a88d9105411afdbdc9291b7e4bfa8f13685c0bfc660024a846 Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.933961 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.950259 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.972553 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 31 05:41:21 crc kubenswrapper[4712]: I0131 05:41:21.989752 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.014521 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.025857 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rzdlg"] Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.029848 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.049609 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.058237 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-6ctqs"] Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.070243 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.089916 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.109620 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.130105 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.150571 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.170067 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.189516 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.209968 4712 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.225464 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" event={"ID":"fbea73a1-2703-4775-9f42-8c8340e76f46","Type":"ContainerStarted","Data":"04e652c086148c2da7505f775cf86074dce681ee123a160831af4e9fb73efb70"} Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.226600 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" event={"ID":"a9dd2b5c-56c2-430b-9369-7013692ec42d","Type":"ContainerStarted","Data":"2a6c953d000025a88d9105411afdbdc9291b7e4bfa8f13685c0bfc660024a846"} Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.227809 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-6ctqs" event={"ID":"1403369e-fea9-4c82-9432-af21d937566c","Type":"ContainerStarted","Data":"5be699163a6419f4c91057f0260161307617e31d7d9d5580ac14e89ac76c8999"} Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.229276 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg" event={"ID":"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2","Type":"ContainerStarted","Data":"4b6779b177524596875ba09051ed04b2a610c196521fe86f5269a4c650d49387"} Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.230141 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.230600 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj" event={"ID":"df97f9bb-69d0-449f-87a8-39eac97fea87","Type":"ContainerStarted","Data":"3741c96d9d829c4621be9255e8c2d26ae3b5998037993057ef07dc99ae50b31e"} Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.250693 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.269865 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.291458 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.311075 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.330996 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.350641 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.371437 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.390507 4712 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.410666 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.437651 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.450726 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.470238 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.507577 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md8qr\" (UniqueName: \"kubernetes.io/projected/71d0663f-6a96-43ff-91fe-25bf58eb996e-kube-api-access-md8qr\") pod \"machine-api-operator-5694c8668f-h8q6s\" (UID: \"71d0663f-6a96-43ff-91fe-25bf58eb996e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.523972 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp4dx\" (UniqueName: \"kubernetes.io/projected/361fda8f-a29d-4f1c-8356-33d8d94ec967-kube-api-access-jp4dx\") pod \"dns-operator-744455d44c-x49kq\" (UID: \"361fda8f-a29d-4f1c-8356-33d8d94ec967\") " pod="openshift-dns-operator/dns-operator-744455d44c-x49kq" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.524704 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-x49kq" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.548529 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.548310 4712 request.go:700] Waited for 1.909303459s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-operator/serviceaccounts/ingress-operator/token Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.549962 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8cbt\" (UniqueName: \"kubernetes.io/projected/930eb788-1c1a-41e5-8989-7fbbf25c5da0-kube-api-access-w8cbt\") pod \"downloads-7954f5f757-s5vdj\" (UID: \"930eb788-1c1a-41e5-8989-7fbbf25c5da0\") " pod="openshift-console/downloads-7954f5f757-s5vdj" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.574208 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brf2h\" (UniqueName: \"kubernetes.io/projected/6dce9696-4500-4073-86f7-479ca63279bc-kube-api-access-brf2h\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.586736 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6dce9696-4500-4073-86f7-479ca63279bc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4d8k7\" (UID: \"6dce9696-4500-4073-86f7-479ca63279bc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.602611 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.610693 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxlrp\" (UniqueName: \"kubernetes.io/projected/8773f474-ef91-4f45-8461-3a991e6b45ee-kube-api-access-bxlrp\") pod \"openshift-controller-manager-operator-756b6f6bc6-2xf7t\" (UID: \"8773f474-ef91-4f45-8461-3a991e6b45ee\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.640800 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kd9ct\" (UniqueName: \"kubernetes.io/projected/e6a5dfee-6def-4a85-9e8b-854f91517c58-kube-api-access-kd9ct\") pod \"router-default-5444994796-rhq9p\" (UID: \"e6a5dfee-6def-4a85-9e8b-854f91517c58\") " pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.653734 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n5r8\" (UniqueName: \"kubernetes.io/projected/549f7a4d-8fba-47e2-8b51-bb660fe413b4-kube-api-access-8n5r8\") pod \"controller-manager-879f6c89f-7mvft\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.675674 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6cc9\" (UniqueName: \"kubernetes.io/projected/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-kube-api-access-f6cc9\") pod \"console-f9d7485db-8qb8j\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:22 crc 
kubenswrapper[4712]: I0131 05:41:22.691016 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdthv\" (UniqueName: \"kubernetes.io/projected/d7c4b8ae-585b-4c46-9ed4-e992203bd3d7-kube-api-access-pdthv\") pod \"openshift-config-operator-7777fb866f-lmtl8\" (UID: \"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.691749 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.711622 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.722143 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.730350 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.737576 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-x49kq"] Jan 31 05:41:22 crc kubenswrapper[4712]: W0131 05:41:22.743696 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod361fda8f_a29d_4f1c_8356_33d8d94ec967.slice/crio-fced5e92089cc565454ce120cf3d5aa009699e3c2102164105cdf965cf7e2b84 WatchSource:0}: Error finding container fced5e92089cc565454ce120cf3d5aa009699e3c2102164105cdf965cf7e2b84: Status 404 returned error can't find the container with id fced5e92089cc565454ce120cf3d5aa009699e3c2102164105cdf965cf7e2b84 Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.751318 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.770620 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.785340 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-h8q6s"] Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.790900 4712 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.804371 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.811062 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.818531 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.826759 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7"] Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.830745 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.834699 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-s5vdj" Jan 31 05:41:22 crc kubenswrapper[4712]: W0131 05:41:22.837807 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6dce9696_4500_4073_86f7_479ca63279bc.slice/crio-6329cceeea3d135a31af6c37a8400acce2e5f8783c5e71ccdb7d281132f98873 WatchSource:0}: Error finding container 6329cceeea3d135a31af6c37a8400acce2e5f8783c5e71ccdb7d281132f98873: Status 404 returned error can't find the container with id 6329cceeea3d135a31af6c37a8400acce2e5f8783c5e71ccdb7d281132f98873 Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.850617 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.850782 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.873880 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.889976 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.944871 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8qb8j"] Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972611 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/352255e7-59b9-4e55-8a87-92e73729fa80-serving-cert\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972666 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972697 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/747781a6-dea8-4fed-af24-371ba391d6c0-etcd-client\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972723 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-dir\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972769 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcq96\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-kube-api-access-zcq96\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972798 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp2q8\" (UniqueName: \"kubernetes.io/projected/85b78da7-6c11-44e7-9551-c482fcc56080-kube-api-access-dp2q8\") pod \"cluster-samples-operator-665b6dd947-5bc9r\" (UID: \"85b78da7-6c11-44e7-9551-c482fcc56080\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972834 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fecbebbb-c701-40e7-9755-5ff54d25523d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972893 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972935 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972973 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/747781a6-dea8-4fed-af24-371ba391d6c0-etcd-service-ca\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.972994 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-image-import-ca\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973012 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-rclbd\" (UID: \"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973033 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-certificates\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973089 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/747781a6-dea8-4fed-af24-371ba391d6c0-config\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973110 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fwrn\" (UniqueName: \"kubernetes.io/projected/fe1711a8-f896-4652-925d-d3f25c982517-kube-api-access-9fwrn\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973142 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/26a2a101-f807-429c-bd37-cbfc78572fdd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-24xjr\" (UID: \"26a2a101-f807-429c-bd37-cbfc78572fdd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973165 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/352255e7-59b9-4e55-8a87-92e73729fa80-node-pullsecrets\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973212 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-rclbd\" (UID: \"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973252 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-etcd-serving-ca\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973279 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56lcd\" (UniqueName: \"kubernetes.io/projected/3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0-kube-api-access-56lcd\") pod \"openshift-apiserver-operator-796bbdcf4f-rclbd\" (UID: \"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973308 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/747781a6-dea8-4fed-af24-371ba391d6c0-serving-cert\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973353 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973379 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/85b78da7-6c11-44e7-9551-c482fcc56080-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-5bc9r\" (UID: \"85b78da7-6c11-44e7-9551-c482fcc56080\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973402 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/26a2a101-f807-429c-bd37-cbfc78572fdd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-24xjr\" (UID: \"26a2a101-f807-429c-bd37-cbfc78572fdd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973431 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973456 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac898594-275a-4083-9217-886a7f4e8a5d-config\") pod \"kube-controller-manager-operator-78b949d7b-t4pvh\" (UID: \"ac898594-275a-4083-9217-886a7f4e8a5d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.973486 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac898594-275a-4083-9217-886a7f4e8a5d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-t4pvh\" (UID: \"ac898594-275a-4083-9217-886a7f4e8a5d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979487 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979526 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979580 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979638 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9sv8\" (UniqueName: \"kubernetes.io/projected/352255e7-59b9-4e55-8a87-92e73729fa80-kube-api-access-c9sv8\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979669 4712 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-policies\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979705 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-config\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979739 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe1711a8-f896-4652-925d-d3f25c982517-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979766 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979864 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-tls\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979891 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/747781a6-dea8-4fed-af24-371ba391d6c0-etcd-ca\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979920 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-audit\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.979972 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsmd8\" (UniqueName: \"kubernetes.io/projected/68cae03a-7e79-42ab-b79a-83b774550a8f-kube-api-access-hsmd8\") pod \"migrator-59844c95c7-k6mqf\" (UID: \"68cae03a-7e79-42ab-b79a-83b774550a8f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980004 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980122 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe1711a8-f896-4652-925d-d3f25c982517-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980154 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/352255e7-59b9-4e55-8a87-92e73729fa80-encryption-config\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980210 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980239 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/352255e7-59b9-4e55-8a87-92e73729fa80-etcd-client\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980288 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac898594-275a-4083-9217-886a7f4e8a5d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-t4pvh\" (UID: \"ac898594-275a-4083-9217-886a7f4e8a5d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980321 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26a2a101-f807-429c-bd37-cbfc78572fdd-config\") pod \"kube-apiserver-operator-766d6c64bb-24xjr\" (UID: \"26a2a101-f807-429c-bd37-cbfc78572fdd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980390 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fecbebbb-c701-40e7-9755-5ff54d25523d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980463 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/fe1711a8-f896-4652-925d-d3f25c982517-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980544 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980579 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmmzx\" (UniqueName: \"kubernetes.io/projected/4a308f02-3cb6-4226-85a1-4a82e2289551-kube-api-access-dmmzx\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980639 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-bound-sa-token\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980716 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpskm\" (UniqueName: \"kubernetes.io/projected/747781a6-dea8-4fed-af24-371ba391d6c0-kube-api-access-hpskm\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980774 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980822 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-trusted-ca\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:22 crc kubenswrapper[4712]: I0131 05:41:22.980894 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/352255e7-59b9-4e55-8a87-92e73729fa80-audit-dir\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:22 crc kubenswrapper[4712]: E0131 05:41:22.981698 4712 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:23.481678141 +0000 UTC m=+149.575559982 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.060963 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8"] Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083244 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.083419 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:23.583390516 +0000 UTC m=+149.677272357 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083515 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1561c218-0068-4279-99f0-b2417059eeed-cert\") pod \"ingress-canary-ldwpn\" (UID: \"1561c218-0068-4279-99f0-b2417059eeed\") " pod="openshift-ingress-canary/ingress-canary-ldwpn" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083564 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/747781a6-dea8-4fed-af24-371ba391d6c0-serving-cert\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083589 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083646 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-k92ff\" (UniqueName: \"kubernetes.io/projected/c4e74709-7867-4c48-b77f-9e0e7f441b39-kube-api-access-k92ff\") pod \"olm-operator-6b444d44fb-wshvv\" (UID: \"c4e74709-7867-4c48-b77f-9e0e7f441b39\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083672 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/728479af-3ccc-4ab2-8f85-e5bc28a6e7d9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hc54l\" (UID: \"728479af-3ccc-4ab2-8f85-e5bc28a6e7d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083706 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/26a2a101-f807-429c-bd37-cbfc78572fdd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-24xjr\" (UID: \"26a2a101-f807-429c-bd37-cbfc78572fdd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083734 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083782 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac898594-275a-4083-9217-886a7f4e8a5d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-t4pvh\" (UID: \"ac898594-275a-4083-9217-886a7f4e8a5d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083807 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083832 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9sv8\" (UniqueName: \"kubernetes.io/projected/352255e7-59b9-4e55-8a87-92e73729fa80-kube-api-access-c9sv8\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083859 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0fd4c024-c591-4752-bd9e-7bf028811d24-apiservice-cert\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083883 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-config\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083906 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-policies\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083930 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6add969-5d7d-4966-bcaf-46cc6e60c3ed-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-swhvs\" (UID: \"c6add969-5d7d-4966-bcaf-46cc6e60c3ed\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083953 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-socket-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.083980 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe1711a8-f896-4652-925d-d3f25c982517-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084003 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-825q5\" (UniqueName: \"kubernetes.io/projected/b7673fef-02b4-4d38-bc24-998bd8ee4434-kube-api-access-825q5\") pod \"service-ca-9c57cc56f-n5lvh\" (UID: \"b7673fef-02b4-4d38-bc24-998bd8ee4434\") " pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084052 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7sn5\" (UniqueName: \"kubernetes.io/projected/0bd1b455-98b1-4cdc-b930-6c7a251f39a7-kube-api-access-x7sn5\") pod \"catalog-operator-68c6474976-qls5q\" (UID: \"0bd1b455-98b1-4cdc-b930-6c7a251f39a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084077 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-927lx\" (UniqueName: \"kubernetes.io/projected/0fd4c024-c591-4752-bd9e-7bf028811d24-kube-api-access-927lx\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084100 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-registration-dir\") pod 
\"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084125 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zkv2d\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084151 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-tls\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084188 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/747781a6-dea8-4fed-af24-371ba391d6c0-etcd-ca\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084217 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-audit\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084244 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsmd8\" (UniqueName: \"kubernetes.io/projected/68cae03a-7e79-42ab-b79a-83b774550a8f-kube-api-access-hsmd8\") pod \"migrator-59844c95c7-k6mqf\" (UID: \"68cae03a-7e79-42ab-b79a-83b774550a8f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084272 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hksp9\" (UniqueName: \"kubernetes.io/projected/dc0eb91d-cc30-4ef8-aa0b-be90744ba313-kube-api-access-hksp9\") pod \"control-plane-machine-set-operator-78cbb6b69f-qdzvl\" (UID: \"dc0eb91d-cc30-4ef8-aa0b-be90744ba313\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084329 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084374 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe1711a8-f896-4652-925d-d3f25c982517-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" 
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084399 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/352255e7-59b9-4e55-8a87-92e73729fa80-encryption-config\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084486 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/dc0eb91d-cc30-4ef8-aa0b-be90744ba313-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-qdzvl\" (UID: \"dc0eb91d-cc30-4ef8-aa0b-be90744ba313\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084527 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26a2a101-f807-429c-bd37-cbfc78572fdd-config\") pod \"kube-apiserver-operator-766d6c64bb-24xjr\" (UID: \"26a2a101-f807-429c-bd37-cbfc78572fdd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.084548 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/352255e7-59b9-4e55-8a87-92e73729fa80-etcd-client\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.089848 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe1711a8-f896-4652-925d-d3f25c982517-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.090489 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.090639 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.090720 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-audit\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.090857 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.091213 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-config\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.091350 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/747781a6-dea8-4fed-af24-371ba391d6c0-etcd-ca\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.092011 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-policies\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.094100 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26a2a101-f807-429c-bd37-cbfc78572fdd-config\") pod \"kube-apiserver-operator-766d6c64bb-24xjr\" (UID: \"26a2a101-f807-429c-bd37-cbfc78572fdd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.094448 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.094880 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/747781a6-dea8-4fed-af24-371ba391d6c0-serving-cert\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.097904 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-tls\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.099369 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/352255e7-59b9-4e55-8a87-92e73729fa80-etcd-client\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.103283 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/352255e7-59b9-4e55-8a87-92e73729fa80-encryption-config\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.112189 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d63625f4-7c2b-4196-951e-99c5978046e7-proxy-tls\") pod \"machine-config-controller-84d6567774-lcb4j\" (UID: \"d63625f4-7c2b-4196-951e-99c5978046e7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.114858 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fecbebbb-c701-40e7-9755-5ff54d25523d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.114999 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/fe1711a8-f896-4652-925d-d3f25c982517-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.115166 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-mountpoint-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.115253 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-secret-volume\") pod \"collect-profiles-29497290-bbd4q\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.115595 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj8br\" (UniqueName: \"kubernetes.io/projected/21b71afe-6ea3-40c2-9d8a-d500fc90f7af-kube-api-access-mj8br\") pod \"kube-storage-version-migrator-operator-b67b599dd-xhtln\" (UID: \"21b71afe-6ea3-40c2-9d8a-d500fc90f7af\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116091 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6add969-5d7d-4966-bcaf-46cc6e60c3ed-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-swhvs\" (UID: \"c6add969-5d7d-4966-bcaf-46cc6e60c3ed\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116215 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-plugins-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116255 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlp9l\" (UniqueName: \"kubernetes.io/projected/11d0bb16-cbd3-4f4e-8b48-9f5676ec5881-kube-api-access-qlp9l\") pod \"dns-default-xgchj\" (UID: \"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881\") " pod="openshift-dns/dns-default-xgchj"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116351 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d63625f4-7c2b-4196-951e-99c5978046e7-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-lcb4j\" (UID: \"d63625f4-7c2b-4196-951e-99c5978046e7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116423 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/747781a6-dea8-4fed-af24-371ba391d6c0-etcd-client\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116454 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fecbebbb-c701-40e7-9755-5ff54d25523d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116507 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116538 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbbkg\" (UniqueName: \"kubernetes.io/projected/7818ef80-fbf1-48f4-8921-84b4d693ae62-kube-api-access-vbbkg\") pod \"package-server-manager-789f6589d5-6lmn7\" (UID: \"7818ef80-fbf1-48f4-8921-84b4d693ae62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116586 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-certificates\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116614 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-image-import-ca\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116640 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-rclbd\" (UID: \"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116668 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0bd1b455-98b1-4cdc-b930-6c7a251f39a7-profile-collector-cert\") pod \"catalog-operator-68c6474976-qls5q\" (UID: \"0bd1b455-98b1-4cdc-b930-6c7a251f39a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116694 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/661bf4e2-2297-4862-9a82-9af65c1dd20c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116721 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/661bf4e2-2297-4862-9a82-9af65c1dd20c-images\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116746 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/661bf4e2-2297-4862-9a82-9af65c1dd20c-proxy-tls\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116824 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56lcd\" (UniqueName: \"kubernetes.io/projected/3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0-kube-api-access-56lcd\") pod \"openshift-apiserver-operator-796bbdcf4f-rclbd\" (UID: \"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116874 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2964b046-c398-4eaa-8f8e-a608af22570e-certs\") pod \"machine-config-server-4kvm8\" (UID: \"2964b046-c398-4eaa-8f8e-a608af22570e\") " pod="openshift-machine-config-operator/machine-config-server-4kvm8"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116900 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0bd1b455-98b1-4cdc-b930-6c7a251f39a7-srv-cert\") pod \"catalog-operator-68c6474976-qls5q\" (UID: \"0bd1b455-98b1-4cdc-b930-6c7a251f39a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116941 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/85b78da7-6c11-44e7-9551-c482fcc56080-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-5bc9r\" (UID: \"85b78da7-6c11-44e7-9551-c482fcc56080\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116973 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac898594-275a-4083-9217-886a7f4e8a5d-config\") pod \"kube-controller-manager-operator-78b949d7b-t4pvh\" (UID: \"ac898594-275a-4083-9217-886a7f4e8a5d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.116999 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117071 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117097 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cmq6\" (UniqueName: \"kubernetes.io/projected/2964b046-c398-4eaa-8f8e-a608af22570e-kube-api-access-9cmq6\") pod \"machine-config-server-4kvm8\" (UID: \"2964b046-c398-4eaa-8f8e-a608af22570e\") " pod="openshift-machine-config-operator/machine-config-server-4kvm8"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117148 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117194 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/11d0bb16-cbd3-4f4e-8b48-9f5676ec5881-metrics-tls\") pod \"dns-default-xgchj\" (UID: \"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881\") " pod="openshift-dns/dns-default-xgchj"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117231 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjzl8\" (UniqueName: \"kubernetes.io/projected/1561c218-0068-4279-99f0-b2417059eeed-kube-api-access-fjzl8\") pod \"ingress-canary-ldwpn\" (UID: \"1561c218-0068-4279-99f0-b2417059eeed\") " pod="openshift-ingress-canary/ingress-canary-ldwpn"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117251 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-csi-data-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117314 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117341 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2964b046-c398-4eaa-8f8e-a608af22570e-node-bootstrap-token\") pod \"machine-config-server-4kvm8\" (UID: \"2964b046-c398-4eaa-8f8e-a608af22570e\") " pod="openshift-machine-config-operator/machine-config-server-4kvm8"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117393 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckhlv\" (UniqueName: \"kubernetes.io/projected/661bf4e2-2297-4862-9a82-9af65c1dd20c-kube-api-access-ckhlv\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117426 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac898594-275a-4083-9217-886a7f4e8a5d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-t4pvh\" (UID: \"ac898594-275a-4083-9217-886a7f4e8a5d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117452 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b7673fef-02b4-4d38-bc24-998bd8ee4434-signing-cabundle\") pod \"service-ca-9c57cc56f-n5lvh\" (UID: \"b7673fef-02b4-4d38-bc24-998bd8ee4434\") " pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117502 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28qg4\" (UniqueName: \"kubernetes.io/projected/728479af-3ccc-4ab2-8f85-e5bc28a6e7d9-kube-api-access-28qg4\") pod \"multus-admission-controller-857f4d67dd-hc54l\" (UID: \"728479af-3ccc-4ab2-8f85-e5bc28a6e7d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117532 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8s8d\" (UniqueName: \"kubernetes.io/projected/f8f21b29-8318-4378-98cc-a7af9ee0b36e-kube-api-access-t8s8d\") pod \"service-ca-operator-777779d784-jvq7b\" (UID: \"f8f21b29-8318-4378-98cc-a7af9ee0b36e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117557 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c4e74709-7867-4c48-b77f-9e0e7f441b39-srv-cert\") pod \"olm-operator-6b444d44fb-wshvv\" (UID: \"c4e74709-7867-4c48-b77f-9e0e7f441b39\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117616 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-bound-sa-token\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117796 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117828 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmmzx\" (UniqueName: \"kubernetes.io/projected/4a308f02-3cb6-4226-85a1-4a82e2289551-kube-api-access-dmmzx\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117861 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11d0bb16-cbd3-4f4e-8b48-9f5676ec5881-config-volume\") pod \"dns-default-xgchj\" (UID: \"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881\") " pod="openshift-dns/dns-default-xgchj"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117893 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5bl9\" (UniqueName: \"kubernetes.io/projected/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-kube-api-access-s5bl9\") pod \"collect-profiles-29497290-bbd4q\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117933 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7818ef80-fbf1-48f4-8921-84b4d693ae62-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6lmn7\" (UID: \"7818ef80-fbf1-48f4-8921-84b4d693ae62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117969 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpskm\" (UniqueName: \"kubernetes.io/projected/747781a6-dea8-4fed-af24-371ba391d6c0-kube-api-access-hpskm\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.117997 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118028 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b7673fef-02b4-4d38-bc24-998bd8ee4434-signing-key\") pod \"service-ca-9c57cc56f-n5lvh\" (UID: \"b7673fef-02b4-4d38-bc24-998bd8ee4434\") " pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118060 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21b71afe-6ea3-40c2-9d8a-d500fc90f7af-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xhtln\" (UID: \"21b71afe-6ea3-40c2-9d8a-d500fc90f7af\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118091 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-trusted-ca\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118118 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c4e74709-7867-4c48-b77f-9e0e7f441b39-profile-collector-cert\") pod \"olm-operator-6b444d44fb-wshvv\" (UID: \"c4e74709-7867-4c48-b77f-9e0e7f441b39\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118152 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8f21b29-8318-4378-98cc-a7af9ee0b36e-serving-cert\") pod \"service-ca-operator-777779d784-jvq7b\" (UID: \"f8f21b29-8318-4378-98cc-a7af9ee0b36e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118198 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8f21b29-8318-4378-98cc-a7af9ee0b36e-config\") pod \"service-ca-operator-777779d784-jvq7b\" (UID: \"f8f21b29-8318-4378-98cc-a7af9ee0b36e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118230 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zkv2d\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118258 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/0fd4c024-c591-4752-bd9e-7bf028811d24-tmpfs\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118295 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/352255e7-59b9-4e55-8a87-92e73729fa80-audit-dir\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118321 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjkx4\" (UniqueName: \"kubernetes.io/projected/d63625f4-7c2b-4196-951e-99c5978046e7-kube-api-access-xjkx4\") pod \"machine-config-controller-84d6567774-lcb4j\" (UID: \"d63625f4-7c2b-4196-951e-99c5978046e7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118375 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/352255e7-59b9-4e55-8a87-92e73729fa80-serving-cert\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118406 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118439 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-dir\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118466 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0fd4c024-c591-4752-bd9e-7bf028811d24-webhook-cert\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118493 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp2q8\" (UniqueName: \"kubernetes.io/projected/85b78da7-6c11-44e7-9551-c482fcc56080-kube-api-access-dp2q8\") pod \"cluster-samples-operator-665b6dd947-5bc9r\" (UID: \"85b78da7-6c11-44e7-9551-c482fcc56080\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118551 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcq96\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-kube-api-access-zcq96\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118615 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21b71afe-6ea3-40c2-9d8a-d500fc90f7af-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xhtln\" (UID: \"21b71afe-6ea3-40c2-9d8a-d500fc90f7af\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118693 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118768 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/747781a6-dea8-4fed-af24-371ba391d6c0-etcd-service-ca\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118878 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmr87\" (UniqueName: \"kubernetes.io/projected/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-kube-api-access-fmr87\") pod \"marketplace-operator-79b997595-zkv2d\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.118933 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fwrn\" (UniqueName: \"kubernetes.io/projected/fe1711a8-f896-4652-925d-d3f25c982517-kube-api-access-9fwrn\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.119007 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/747781a6-dea8-4fed-af24-371ba391d6c0-config\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.119055 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6add969-5d7d-4966-bcaf-46cc6e60c3ed-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-swhvs\" (UID: \"c6add969-5d7d-4966-bcaf-46cc6e60c3ed\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.119091 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26a2a101-f807-429c-bd37-cbfc78572fdd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-24xjr\" (UID: \"26a2a101-f807-429c-bd37-cbfc78572fdd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.119119 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/352255e7-59b9-4e55-8a87-92e73729fa80-node-pullsecrets\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.119152 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-rclbd\" (UID: \"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.119215 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-etcd-serving-ca\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.119238 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2wnd\" (UniqueName: \"kubernetes.io/projected/5bb6c725-d007-4aff-9396-1cc16fec2a1a-kube-api-access-h2wnd\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.119259 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-config-volume\") pod \"collect-profiles-29497290-bbd4q\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.121021 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-image-import-ca\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.121376 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fecbebbb-c701-40e7-9755-5ff54d25523d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.126542 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/fe1711a8-f896-4652-925d-d3f25c982517-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.127160 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.138131 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac898594-275a-4083-9217-886a7f4e8a5d-config\") pod \"kube-controller-manager-operator-78b949d7b-t4pvh\" (UID: \"ac898594-275a-4083-9217-886a7f4e8a5d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.138311 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/85b78da7-6c11-44e7-9551-c482fcc56080-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-5bc9r\" (UID: \"85b78da7-6c11-44e7-9551-c482fcc56080\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.138788 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/352255e7-59b9-4e55-8a87-92e73729fa80-audit-dir\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.140272 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac898594-275a-4083-9217-886a7f4e8a5d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-t4pvh\" (UID: \"ac898594-275a-4083-9217-886a7f4e8a5d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.141351 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.141585 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/352255e7-59b9-4e55-8a87-92e73729fa80-serving-cert\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.142008 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-rclbd\" (UID: \"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.142660 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-certificates\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.143064 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-dir\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.143273 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fecbebbb-c701-40e7-9755-5ff54d25523d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.144405 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsmd8\" (UniqueName: \"kubernetes.io/projected/68cae03a-7e79-42ab-b79a-83b774550a8f-kube-api-access-hsmd8\") pod \"migrator-59844c95c7-k6mqf\" (UID: \"68cae03a-7e79-42ab-b79a-83b774550a8f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.144518 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.146098 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/747781a6-dea8-4fed-af24-371ba391d6c0-etcd-service-ca\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.146108 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-trusted-ca\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.146443 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.147578 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/352255e7-59b9-4e55-8a87-92e73729fa80-etcd-serving-ca\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.149703 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.151148 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.151295 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/352255e7-59b9-4e55-8a87-92e73729fa80-node-pullsecrets\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.156044 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:23.656021862 +0000 UTC m=+149.749903703 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.157121 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-rclbd\" (UID: \"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.157827 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/747781a6-dea8-4fed-af24-371ba391d6c0-config\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.158101 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/747781a6-dea8-4fed-af24-371ba391d6c0-etcd-client\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.163855 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26a2a101-f807-429c-bd37-cbfc78572fdd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-24xjr\" (UID: \"26a2a101-f807-429c-bd37-cbfc78572fdd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.166734 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.172102 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/26a2a101-f807-429c-bd37-cbfc78572fdd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-24xjr\" (UID: \"26a2a101-f807-429c-bd37-cbfc78572fdd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.172814 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.182641 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/ac898594-275a-4083-9217-886a7f4e8a5d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-t4pvh\" (UID: \"ac898594-275a-4083-9217-886a7f4e8a5d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.185381 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe1711a8-f896-4652-925d-d3f25c982517-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.194740 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220045 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220350 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7818ef80-fbf1-48f4-8921-84b4d693ae62-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6lmn7\" (UID: \"7818ef80-fbf1-48f4-8921-84b4d693ae62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220386 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b7673fef-02b4-4d38-bc24-998bd8ee4434-signing-key\") pod \"service-ca-9c57cc56f-n5lvh\" (UID: \"b7673fef-02b4-4d38-bc24-998bd8ee4434\") " pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220405 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/0fd4c024-c591-4752-bd9e-7bf028811d24-tmpfs\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220424 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21b71afe-6ea3-40c2-9d8a-d500fc90f7af-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xhtln\" (UID: \"21b71afe-6ea3-40c2-9d8a-d500fc90f7af\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220441 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c4e74709-7867-4c48-b77f-9e0e7f441b39-profile-collector-cert\") pod \"olm-operator-6b444d44fb-wshvv\" (UID: \"c4e74709-7867-4c48-b77f-9e0e7f441b39\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:23 crc kubenswrapper[4712]: 
I0131 05:41:23.220458 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8f21b29-8318-4378-98cc-a7af9ee0b36e-serving-cert\") pod \"service-ca-operator-777779d784-jvq7b\" (UID: \"f8f21b29-8318-4378-98cc-a7af9ee0b36e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220472 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8f21b29-8318-4378-98cc-a7af9ee0b36e-config\") pod \"service-ca-operator-777779d784-jvq7b\" (UID: \"f8f21b29-8318-4378-98cc-a7af9ee0b36e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220487 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zkv2d\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220505 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjkx4\" (UniqueName: \"kubernetes.io/projected/d63625f4-7c2b-4196-951e-99c5978046e7-kube-api-access-xjkx4\") pod \"machine-config-controller-84d6567774-lcb4j\" (UID: \"d63625f4-7c2b-4196-951e-99c5978046e7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220522 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0fd4c024-c591-4752-bd9e-7bf028811d24-webhook-cert\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220550 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21b71afe-6ea3-40c2-9d8a-d500fc90f7af-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xhtln\" (UID: \"21b71afe-6ea3-40c2-9d8a-d500fc90f7af\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.220575 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmr87\" (UniqueName: \"kubernetes.io/projected/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-kube-api-access-fmr87\") pod \"marketplace-operator-79b997595-zkv2d\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.221835 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6add969-5d7d-4966-bcaf-46cc6e60c3ed-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-swhvs\" (UID: \"c6add969-5d7d-4966-bcaf-46cc6e60c3ed\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.221869 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-h2wnd\" (UniqueName: \"kubernetes.io/projected/5bb6c725-d007-4aff-9396-1cc16fec2a1a-kube-api-access-h2wnd\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.221888 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-config-volume\") pod \"collect-profiles-29497290-bbd4q\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.221904 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1561c218-0068-4279-99f0-b2417059eeed-cert\") pod \"ingress-canary-ldwpn\" (UID: \"1561c218-0068-4279-99f0-b2417059eeed\") " pod="openshift-ingress-canary/ingress-canary-ldwpn" Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.221966 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:23.721909196 +0000 UTC m=+149.815791037 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222044 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k92ff\" (UniqueName: \"kubernetes.io/projected/c4e74709-7867-4c48-b77f-9e0e7f441b39-kube-api-access-k92ff\") pod \"olm-operator-6b444d44fb-wshvv\" (UID: \"c4e74709-7867-4c48-b77f-9e0e7f441b39\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222098 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/728479af-3ccc-4ab2-8f85-e5bc28a6e7d9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hc54l\" (UID: \"728479af-3ccc-4ab2-8f85-e5bc28a6e7d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222156 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0fd4c024-c591-4752-bd9e-7bf028811d24-apiservice-cert\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222198 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6add969-5d7d-4966-bcaf-46cc6e60c3ed-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-swhvs\" (UID: 
\"c6add969-5d7d-4966-bcaf-46cc6e60c3ed\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222221 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-socket-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222243 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-927lx\" (UniqueName: \"kubernetes.io/projected/0fd4c024-c591-4752-bd9e-7bf028811d24-kube-api-access-927lx\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222275 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-825q5\" (UniqueName: \"kubernetes.io/projected/b7673fef-02b4-4d38-bc24-998bd8ee4434-kube-api-access-825q5\") pod \"service-ca-9c57cc56f-n5lvh\" (UID: \"b7673fef-02b4-4d38-bc24-998bd8ee4434\") " pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222297 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7sn5\" (UniqueName: \"kubernetes.io/projected/0bd1b455-98b1-4cdc-b930-6c7a251f39a7-kube-api-access-x7sn5\") pod \"catalog-operator-68c6474976-qls5q\" (UID: \"0bd1b455-98b1-4cdc-b930-6c7a251f39a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222318 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-registration-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222339 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zkv2d\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222364 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hksp9\" (UniqueName: \"kubernetes.io/projected/dc0eb91d-cc30-4ef8-aa0b-be90744ba313-kube-api-access-hksp9\") pod \"control-plane-machine-set-operator-78cbb6b69f-qdzvl\" (UID: \"dc0eb91d-cc30-4ef8-aa0b-be90744ba313\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222391 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/dc0eb91d-cc30-4ef8-aa0b-be90744ba313-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-qdzvl\" (UID: \"dc0eb91d-cc30-4ef8-aa0b-be90744ba313\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222429 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d63625f4-7c2b-4196-951e-99c5978046e7-proxy-tls\") pod \"machine-config-controller-84d6567774-lcb4j\" (UID: \"d63625f4-7c2b-4196-951e-99c5978046e7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222455 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-mountpoint-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222475 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-secret-volume\") pod \"collect-profiles-29497290-bbd4q\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222504 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj8br\" (UniqueName: \"kubernetes.io/projected/21b71afe-6ea3-40c2-9d8a-d500fc90f7af-kube-api-access-mj8br\") pod \"kube-storage-version-migrator-operator-b67b599dd-xhtln\" (UID: \"21b71afe-6ea3-40c2-9d8a-d500fc90f7af\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222521 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlp9l\" (UniqueName: \"kubernetes.io/projected/11d0bb16-cbd3-4f4e-8b48-9f5676ec5881-kube-api-access-qlp9l\") pod \"dns-default-xgchj\" (UID: \"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881\") " pod="openshift-dns/dns-default-xgchj" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222546 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6add969-5d7d-4966-bcaf-46cc6e60c3ed-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-swhvs\" (UID: \"c6add969-5d7d-4966-bcaf-46cc6e60c3ed\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222562 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-plugins-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222597 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d63625f4-7c2b-4196-951e-99c5978046e7-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-lcb4j\" (UID: \"d63625f4-7c2b-4196-951e-99c5978046e7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222626 4712 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbbkg\" (UniqueName: \"kubernetes.io/projected/7818ef80-fbf1-48f4-8921-84b4d693ae62-kube-api-access-vbbkg\") pod \"package-server-manager-789f6589d5-6lmn7\" (UID: \"7818ef80-fbf1-48f4-8921-84b4d693ae62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222655 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0bd1b455-98b1-4cdc-b930-6c7a251f39a7-profile-collector-cert\") pod \"catalog-operator-68c6474976-qls5q\" (UID: \"0bd1b455-98b1-4cdc-b930-6c7a251f39a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222647 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8f21b29-8318-4378-98cc-a7af9ee0b36e-config\") pod \"service-ca-operator-777779d784-jvq7b\" (UID: \"f8f21b29-8318-4378-98cc-a7af9ee0b36e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222675 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/661bf4e2-2297-4862-9a82-9af65c1dd20c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.222696 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/661bf4e2-2297-4862-9a82-9af65c1dd20c-images\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223195 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/661bf4e2-2297-4862-9a82-9af65c1dd20c-proxy-tls\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223251 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2964b046-c398-4eaa-8f8e-a608af22570e-certs\") pod \"machine-config-server-4kvm8\" (UID: \"2964b046-c398-4eaa-8f8e-a608af22570e\") " pod="openshift-machine-config-operator/machine-config-server-4kvm8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223271 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0bd1b455-98b1-4cdc-b930-6c7a251f39a7-srv-cert\") pod \"catalog-operator-68c6474976-qls5q\" (UID: \"0bd1b455-98b1-4cdc-b930-6c7a251f39a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223296 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cmq6\" (UniqueName: 
\"kubernetes.io/projected/2964b046-c398-4eaa-8f8e-a608af22570e-kube-api-access-9cmq6\") pod \"machine-config-server-4kvm8\" (UID: \"2964b046-c398-4eaa-8f8e-a608af22570e\") " pod="openshift-machine-config-operator/machine-config-server-4kvm8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223325 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223356 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/11d0bb16-cbd3-4f4e-8b48-9f5676ec5881-metrics-tls\") pod \"dns-default-xgchj\" (UID: \"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881\") " pod="openshift-dns/dns-default-xgchj" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223380 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjzl8\" (UniqueName: \"kubernetes.io/projected/1561c218-0068-4279-99f0-b2417059eeed-kube-api-access-fjzl8\") pod \"ingress-canary-ldwpn\" (UID: \"1561c218-0068-4279-99f0-b2417059eeed\") " pod="openshift-ingress-canary/ingress-canary-ldwpn" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223409 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-csi-data-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223434 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckhlv\" (UniqueName: \"kubernetes.io/projected/661bf4e2-2297-4862-9a82-9af65c1dd20c-kube-api-access-ckhlv\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223460 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2964b046-c398-4eaa-8f8e-a608af22570e-node-bootstrap-token\") pod \"machine-config-server-4kvm8\" (UID: \"2964b046-c398-4eaa-8f8e-a608af22570e\") " pod="openshift-machine-config-operator/machine-config-server-4kvm8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223478 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b7673fef-02b4-4d38-bc24-998bd8ee4434-signing-cabundle\") pod \"service-ca-9c57cc56f-n5lvh\" (UID: \"b7673fef-02b4-4d38-bc24-998bd8ee4434\") " pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223513 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28qg4\" (UniqueName: \"kubernetes.io/projected/728479af-3ccc-4ab2-8f85-e5bc28a6e7d9-kube-api-access-28qg4\") pod \"multus-admission-controller-857f4d67dd-hc54l\" (UID: \"728479af-3ccc-4ab2-8f85-e5bc28a6e7d9\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223534 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8s8d\" (UniqueName: \"kubernetes.io/projected/f8f21b29-8318-4378-98cc-a7af9ee0b36e-kube-api-access-t8s8d\") pod \"service-ca-operator-777779d784-jvq7b\" (UID: \"f8f21b29-8318-4378-98cc-a7af9ee0b36e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223560 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c4e74709-7867-4c48-b77f-9e0e7f441b39-srv-cert\") pod \"olm-operator-6b444d44fb-wshvv\" (UID: \"c4e74709-7867-4c48-b77f-9e0e7f441b39\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223592 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11d0bb16-cbd3-4f4e-8b48-9f5676ec5881-config-volume\") pod \"dns-default-xgchj\" (UID: \"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881\") " pod="openshift-dns/dns-default-xgchj" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.223608 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5bl9\" (UniqueName: \"kubernetes.io/projected/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-kube-api-access-s5bl9\") pod \"collect-profiles-29497290-bbd4q\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.224158 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/0fd4c024-c591-4752-bd9e-7bf028811d24-tmpfs\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.226283 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0fd4c024-c591-4752-bd9e-7bf028811d24-webhook-cert\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.226687 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21b71afe-6ea3-40c2-9d8a-d500fc90f7af-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xhtln\" (UID: \"21b71afe-6ea3-40c2-9d8a-d500fc90f7af\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.227239 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-zkv2d\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.227691 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/f8f21b29-8318-4378-98cc-a7af9ee0b36e-serving-cert\") pod \"service-ca-operator-777779d784-jvq7b\" (UID: \"f8f21b29-8318-4378-98cc-a7af9ee0b36e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.227835 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:23.727817307 +0000 UTC m=+149.821699148 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.228670 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/dc0eb91d-cc30-4ef8-aa0b-be90744ba313-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-qdzvl\" (UID: \"dc0eb91d-cc30-4ef8-aa0b-be90744ba313\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.229045 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-registration-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.230063 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0fd4c024-c591-4752-bd9e-7bf028811d24-apiservice-cert\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.230138 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-socket-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.230992 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d63625f4-7c2b-4196-951e-99c5978046e7-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-lcb4j\" (UID: \"d63625f4-7c2b-4196-951e-99c5978046e7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.231047 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-mountpoint-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " 
pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.231715 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0bd1b455-98b1-4cdc-b930-6c7a251f39a7-srv-cert\") pod \"catalog-operator-68c6474976-qls5q\" (UID: \"0bd1b455-98b1-4cdc-b930-6c7a251f39a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.234352 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b7673fef-02b4-4d38-bc24-998bd8ee4434-signing-key\") pod \"service-ca-9c57cc56f-n5lvh\" (UID: \"b7673fef-02b4-4d38-bc24-998bd8ee4434\") " pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.234874 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/11d0bb16-cbd3-4f4e-8b48-9f5676ec5881-metrics-tls\") pod \"dns-default-xgchj\" (UID: \"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881\") " pod="openshift-dns/dns-default-xgchj" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.235194 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-csi-data-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.237963 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d63625f4-7c2b-4196-951e-99c5978046e7-proxy-tls\") pod \"machine-config-controller-84d6567774-lcb4j\" (UID: \"d63625f4-7c2b-4196-951e-99c5978046e7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.238727 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/661bf4e2-2297-4862-9a82-9af65c1dd20c-images\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.238834 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2964b046-c398-4eaa-8f8e-a608af22570e-node-bootstrap-token\") pod \"machine-config-server-4kvm8\" (UID: \"2964b046-c398-4eaa-8f8e-a608af22570e\") " pod="openshift-machine-config-operator/machine-config-server-4kvm8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.239239 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/661bf4e2-2297-4862-9a82-9af65c1dd20c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.239739 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b7673fef-02b4-4d38-bc24-998bd8ee4434-signing-cabundle\") pod \"service-ca-9c57cc56f-n5lvh\" (UID: 
\"b7673fef-02b4-4d38-bc24-998bd8ee4434\") " pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.240392 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-config-volume\") pod \"collect-profiles-29497290-bbd4q\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.240406 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11d0bb16-cbd3-4f4e-8b48-9f5676ec5881-config-volume\") pod \"dns-default-xgchj\" (UID: \"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881\") " pod="openshift-dns/dns-default-xgchj" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.241545 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-zkv2d\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.241899 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/728479af-3ccc-4ab2-8f85-e5bc28a6e7d9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hc54l\" (UID: \"728479af-3ccc-4ab2-8f85-e5bc28a6e7d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.242891 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6add969-5d7d-4966-bcaf-46cc6e60c3ed-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-swhvs\" (UID: \"c6add969-5d7d-4966-bcaf-46cc6e60c3ed\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.243487 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c4e74709-7867-4c48-b77f-9e0e7f441b39-profile-collector-cert\") pod \"olm-operator-6b444d44fb-wshvv\" (UID: \"c4e74709-7867-4c48-b77f-9e0e7f441b39\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.243684 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0bd1b455-98b1-4cdc-b930-6c7a251f39a7-profile-collector-cert\") pod \"catalog-operator-68c6474976-qls5q\" (UID: \"0bd1b455-98b1-4cdc-b930-6c7a251f39a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.244160 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6add969-5d7d-4966-bcaf-46cc6e60c3ed-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-swhvs\" (UID: \"c6add969-5d7d-4966-bcaf-46cc6e60c3ed\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.244223 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5bb6c725-d007-4aff-9396-1cc16fec2a1a-plugins-dir\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.245116 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9sv8\" (UniqueName: \"kubernetes.io/projected/352255e7-59b9-4e55-8a87-92e73729fa80-kube-api-access-c9sv8\") pod \"apiserver-76f77b778f-6xfbd\" (UID: \"352255e7-59b9-4e55-8a87-92e73729fa80\") " pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.251230 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/661bf4e2-2297-4862-9a82-9af65c1dd20c-proxy-tls\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.251943 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21b71afe-6ea3-40c2-9d8a-d500fc90f7af-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xhtln\" (UID: \"21b71afe-6ea3-40c2-9d8a-d500fc90f7af\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.252361 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56lcd\" (UniqueName: \"kubernetes.io/projected/3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0-kube-api-access-56lcd\") pod \"openshift-apiserver-operator-796bbdcf4f-rclbd\" (UID: \"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.252801 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c4e74709-7867-4c48-b77f-9e0e7f441b39-srv-cert\") pod \"olm-operator-6b444d44fb-wshvv\" (UID: \"c4e74709-7867-4c48-b77f-9e0e7f441b39\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.253154 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-secret-volume\") pod \"collect-profiles-29497290-bbd4q\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.256920 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7818ef80-fbf1-48f4-8921-84b4d693ae62-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6lmn7\" (UID: \"7818ef80-fbf1-48f4-8921-84b4d693ae62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.257318 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2964b046-c398-4eaa-8f8e-a608af22570e-certs\") pod \"machine-config-server-4kvm8\" (UID: 
\"2964b046-c398-4eaa-8f8e-a608af22570e\") " pod="openshift-machine-config-operator/machine-config-server-4kvm8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.257998 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1561c218-0068-4279-99f0-b2417059eeed-cert\") pod \"ingress-canary-ldwpn\" (UID: \"1561c218-0068-4279-99f0-b2417059eeed\") " pod="openshift-ingress-canary/ingress-canary-ldwpn" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.278676 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-s5vdj"] Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.279957 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp2q8\" (UniqueName: \"kubernetes.io/projected/85b78da7-6c11-44e7-9551-c482fcc56080-kube-api-access-dp2q8\") pod \"cluster-samples-operator-665b6dd947-5bc9r\" (UID: \"85b78da7-6c11-44e7-9551-c482fcc56080\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.284249 4712 generic.go:334] "Generic (PLEG): container finished" podID="a9dd2b5c-56c2-430b-9369-7013692ec42d" containerID="9ea70b5af9016e35053457255485a16ca84dcb9fc437df3a98333ac583b86898" exitCode=0 Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.284332 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" event={"ID":"a9dd2b5c-56c2-430b-9369-7013692ec42d","Type":"ContainerDied","Data":"9ea70b5af9016e35053457255485a16ca84dcb9fc437df3a98333ac583b86898"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.288338 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcq96\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-kube-api-access-zcq96\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.308493 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" event={"ID":"71d0663f-6a96-43ff-91fe-25bf58eb996e","Type":"ContainerStarted","Data":"a40dc5f6d2cc9600d69af6bf901912de6e2f9ee626b6d17210738abed4d2cdd1"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.308547 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" event={"ID":"71d0663f-6a96-43ff-91fe-25bf58eb996e","Type":"ContainerStarted","Data":"e89a6736381add18ca2078ac74104417fa7ac9a0a544668b8bc72b204b222e9b"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.308563 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" event={"ID":"71d0663f-6a96-43ff-91fe-25bf58eb996e","Type":"ContainerStarted","Data":"587bfc04bd0f7b4f4e083996783eb57fe1c05cdef125279c3e9a097e6938d108"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.309902 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8qb8j" event={"ID":"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14","Type":"ContainerStarted","Data":"7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.310270 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-console/console-f9d7485db-8qb8j" event={"ID":"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14","Type":"ContainerStarted","Data":"4fc992ab96d350ac3cc725c6187280b2c9689e5adf99d518bfc83b4665540cef"} Jan 31 05:41:23 crc kubenswrapper[4712]: W0131 05:41:23.317477 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod930eb788_1c1a_41e5_8989_7fbbf25c5da0.slice/crio-c7024d880629e507335e9de30deff2a7a41ecb61574f07d1139c36b3f258055d WatchSource:0}: Error finding container c7024d880629e507335e9de30deff2a7a41ecb61574f07d1139c36b3f258055d: Status 404 returned error can't find the container with id c7024d880629e507335e9de30deff2a7a41ecb61574f07d1139c36b3f258055d Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.321186 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mvft"] Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.322674 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rhq9p" event={"ID":"e6a5dfee-6def-4a85-9e8b-854f91517c58","Type":"ContainerStarted","Data":"5c23f1c2f733eb4946b0db0edeed6a7f707f87c43fba08b54a5520038ba74232"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.322710 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rhq9p" event={"ID":"e6a5dfee-6def-4a85-9e8b-854f91517c58","Type":"ContainerStarted","Data":"15640002a3aba134c1eeb326b41c2bc19433a31202e33eb0d2116e8f9de5279f"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.324811 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.324897 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" event={"ID":"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7","Type":"ContainerStarted","Data":"5c0ef956d6e62842ae94f548a5830007eba68819e6e0e34b736d21bcdab9ecda"} Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.325487 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:23.825470317 +0000 UTC m=+149.919352158 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.326807 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpskm\" (UniqueName: \"kubernetes.io/projected/747781a6-dea8-4fed-af24-371ba391d6c0-kube-api-access-hpskm\") pod \"etcd-operator-b45778765-bdt97\" (UID: \"747781a6-dea8-4fed-af24-371ba391d6c0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.330261 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.332053 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj" event={"ID":"df97f9bb-69d0-449f-87a8-39eac97fea87","Type":"ContainerStarted","Data":"56ae82637b00331a58471ed0f8fe1ca95e90fe4ca42c6477282332fb2c5e19ae"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.332099 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj" event={"ID":"df97f9bb-69d0-449f-87a8-39eac97fea87","Type":"ContainerStarted","Data":"6b523013ec89638b566d0569edc2f6e764c2094a239ef6872b49c531a8b9302c"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.340504 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-x49kq" event={"ID":"361fda8f-a29d-4f1c-8356-33d8d94ec967","Type":"ContainerStarted","Data":"776f371b21493e4d60f0674c17a9e17e2cbd6de058d102282977555874923845"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.341625 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-x49kq" event={"ID":"361fda8f-a29d-4f1c-8356-33d8d94ec967","Type":"ContainerStarted","Data":"fced5e92089cc565454ce120cf3d5aa009699e3c2102164105cdf965cf7e2b84"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.347203 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-bound-sa-token\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.348318 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg" event={"ID":"2d94d9e7-57bf-4e16-8587-5739a7dfb0a2","Type":"ContainerStarted","Data":"8aafe0a91c126803e2c273209ee66e49c82c0816048be7ace2c1b0ded833d376"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.349060 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fwrn\" (UniqueName: \"kubernetes.io/projected/fe1711a8-f896-4652-925d-d3f25c982517-kube-api-access-9fwrn\") pod \"cluster-image-registry-operator-dc59b4c8b-pc46d\" (UID: \"fe1711a8-f896-4652-925d-d3f25c982517\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.360793 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-6ctqs" event={"ID":"1403369e-fea9-4c82-9432-af21d937566c","Type":"ContainerStarted","Data":"befac9b413e682ed54111022fabe0fc5420db0a7c7602f1c5b18e4287feb0f50"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.362049 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-6ctqs" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.370420 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmmzx\" (UniqueName: \"kubernetes.io/projected/4a308f02-3cb6-4226-85a1-4a82e2289551-kube-api-access-dmmzx\") pod \"oauth-openshift-558db77b4-zsktt\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.373792 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" event={"ID":"6dce9696-4500-4073-86f7-479ca63279bc","Type":"ContainerStarted","Data":"9db01c6c6962932bf41be0825e622e69a907ea927960e6f9efe7e77408d0dca6"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.373843 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" event={"ID":"6dce9696-4500-4073-86f7-479ca63279bc","Type":"ContainerStarted","Data":"6329cceeea3d135a31af6c37a8400acce2e5f8783c5e71ccdb7d281132f98873"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.374804 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t"] Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.374978 4712 patch_prober.go:28] interesting pod/console-operator-58897d9998-6ctqs container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.375022 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-6ctqs" podUID="1403369e-fea9-4c82-9432-af21d937566c" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.381302 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.392001 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" event={"ID":"fbea73a1-2703-4775-9f42-8c8340e76f46","Type":"ContainerStarted","Data":"c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae"} Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.393052 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.393969 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.406032 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjkx4\" (UniqueName: \"kubernetes.io/projected/d63625f4-7c2b-4196-951e-99c5978046e7-kube-api-access-xjkx4\") pod \"machine-config-controller-84d6567774-lcb4j\" (UID: \"d63625f4-7c2b-4196-951e-99c5978046e7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" Jan 31 05:41:23 crc kubenswrapper[4712]: W0131 05:41:23.411363 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8773f474_ef91_4f45_8461_3a991e6b45ee.slice/crio-95a3b5744537c7b36c6f2ead1ec89b7944f44687518640120ba3ca7ce525eb6e WatchSource:0}: Error finding container 95a3b5744537c7b36c6f2ead1ec89b7944f44687518640120ba3ca7ce525eb6e: Status 404 returned error can't find the container with id 95a3b5744537c7b36c6f2ead1ec89b7944f44687518640120ba3ca7ce525eb6e Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.420041 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.425154 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-927lx\" (UniqueName: \"kubernetes.io/projected/0fd4c024-c591-4752-bd9e-7bf028811d24-kube-api-access-927lx\") pod \"packageserver-d55dfcdfc-zw9hr\" (UID: \"0fd4c024-c591-4752-bd9e-7bf028811d24\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.428042 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.432018 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:23.931992035 +0000 UTC m=+150.025873876 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.440828 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.448579 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.451264 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k92ff\" (UniqueName: \"kubernetes.io/projected/c4e74709-7867-4c48-b77f-9e0e7f441b39-kube-api-access-k92ff\") pod \"olm-operator-6b444d44fb-wshvv\" (UID: \"c4e74709-7867-4c48-b77f-9e0e7f441b39\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.455320 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.468304 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cmq6\" (UniqueName: \"kubernetes.io/projected/2964b046-c398-4eaa-8f8e-a608af22570e-kube-api-access-9cmq6\") pod \"machine-config-server-4kvm8\" (UID: \"2964b046-c398-4eaa-8f8e-a608af22570e\") " pod="openshift-machine-config-operator/machine-config-server-4kvm8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.489145 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-825q5\" (UniqueName: \"kubernetes.io/projected/b7673fef-02b4-4d38-bc24-998bd8ee4434-kube-api-access-825q5\") pod \"service-ca-9c57cc56f-n5lvh\" (UID: \"b7673fef-02b4-4d38-bc24-998bd8ee4434\") " pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.512124 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7sn5\" (UniqueName: \"kubernetes.io/projected/0bd1b455-98b1-4cdc-b930-6c7a251f39a7-kube-api-access-x7sn5\") pod \"catalog-operator-68c6474976-qls5q\" (UID: \"0bd1b455-98b1-4cdc-b930-6c7a251f39a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.523083 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.529447 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.529762 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.029717786 +0000 UTC m=+150.123599627 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.530118 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.530555 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.030539567 +0000 UTC m=+150.124421478 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.539081 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.542684 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbbkg\" (UniqueName: \"kubernetes.io/projected/7818ef80-fbf1-48f4-8921-84b4d693ae62-kube-api-access-vbbkg\") pod \"package-server-manager-789f6589d5-6lmn7\" (UID: \"7818ef80-fbf1-48f4-8921-84b4d693ae62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.547497 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjzl8\" (UniqueName: \"kubernetes.io/projected/1561c218-0068-4279-99f0-b2417059eeed-kube-api-access-fjzl8\") pod \"ingress-canary-ldwpn\" (UID: \"1561c218-0068-4279-99f0-b2417059eeed\") " pod="openshift-ingress-canary/ingress-canary-ldwpn" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.554721 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.563053 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.578163 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckhlv\" (UniqueName: \"kubernetes.io/projected/661bf4e2-2297-4862-9a82-9af65c1dd20c-kube-api-access-ckhlv\") pod \"machine-config-operator-74547568cd-2kqpg\" (UID: \"661bf4e2-2297-4862-9a82-9af65c1dd20c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.586705 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.590303 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2wnd\" (UniqueName: \"kubernetes.io/projected/5bb6c725-d007-4aff-9396-1cc16fec2a1a-kube-api-access-h2wnd\") pod \"csi-hostpathplugin-9kwv8\" (UID: \"5bb6c725-d007-4aff-9396-1cc16fec2a1a\") " pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.590606 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.609482 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-4kvm8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.614052 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.614164 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmr87\" (UniqueName: \"kubernetes.io/projected/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-kube-api-access-fmr87\") pod \"marketplace-operator-79b997595-zkv2d\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.618063 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.629513 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6add969-5d7d-4966-bcaf-46cc6e60c3ed-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-swhvs\" (UID: \"c6add969-5d7d-4966-bcaf-46cc6e60c3ed\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.634915 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.635324 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.13530823 +0000 UTC m=+150.229190071 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.641869 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.654260 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28qg4\" (UniqueName: \"kubernetes.io/projected/728479af-3ccc-4ab2-8f85-e5bc28a6e7d9-kube-api-access-28qg4\") pod \"multus-admission-controller-857f4d67dd-hc54l\" (UID: \"728479af-3ccc-4ab2-8f85-e5bc28a6e7d9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.655192 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.665239 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-ldwpn" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.707295 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5bl9\" (UniqueName: \"kubernetes.io/projected/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-kube-api-access-s5bl9\") pod \"collect-profiles-29497290-bbd4q\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.719841 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8s8d\" (UniqueName: \"kubernetes.io/projected/f8f21b29-8318-4378-98cc-a7af9ee0b36e-kube-api-access-t8s8d\") pod \"service-ca-operator-777779d784-jvq7b\" (UID: \"f8f21b29-8318-4378-98cc-a7af9ee0b36e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.737003 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.737398 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.237383873 +0000 UTC m=+150.331265714 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.737593 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj8br\" (UniqueName: \"kubernetes.io/projected/21b71afe-6ea3-40c2-9d8a-d500fc90f7af-kube-api-access-mj8br\") pod \"kube-storage-version-migrator-operator-b67b599dd-xhtln\" (UID: \"21b71afe-6ea3-40c2-9d8a-d500fc90f7af\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.745685 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlp9l\" (UniqueName: \"kubernetes.io/projected/11d0bb16-cbd3-4f4e-8b48-9f5676ec5881-kube-api-access-qlp9l\") pod \"dns-default-xgchj\" (UID: \"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881\") " pod="openshift-dns/dns-default-xgchj" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.765188 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh"] Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.767084 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hksp9\" (UniqueName: \"kubernetes.io/projected/dc0eb91d-cc30-4ef8-aa0b-be90744ba313-kube-api-access-hksp9\") pod \"control-plane-machine-set-operator-78cbb6b69f-qdzvl\" (UID: \"dc0eb91d-cc30-4ef8-aa0b-be90744ba313\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.811026 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.821781 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.840235 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.840531 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.340516644 +0000 UTC m=+150.434398485 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.843784 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.869609 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.875742 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.900151 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.900784 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-rhq9p" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.927464 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.930825 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd"] Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.939884 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xgchj" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.948598 4712 csr.go:261] certificate signing request csr-9rxhg is approved, waiting to be issued Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.948660 4712 csr.go:257] certificate signing request csr-9rxhg is issued Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.951045 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:23 crc kubenswrapper[4712]: E0131 05:41:23.951691 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.45167465 +0000 UTC m=+150.545556491 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.963904 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-6ctqs" podStartSLOduration=126.963880154 podStartE2EDuration="2m6.963880154s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:23.925433925 +0000 UTC m=+150.019315766" watchObservedRunningTime="2026-01-31 05:41:23.963880154 +0000 UTC m=+150.057761995" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.976721 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.976862 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Jan 31 05:41:23 crc kubenswrapper[4712]: I0131 05:41:23.985032 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" podStartSLOduration=125.985013307 podStartE2EDuration="2m5.985013307s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:23.977307359 +0000 UTC m=+150.071189200" watchObservedRunningTime="2026-01-31 05:41:23.985013307 +0000 UTC m=+150.078895148" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.008926 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r"] Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.056627 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.057296 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.557271174 +0000 UTC m=+150.651153015 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: W0131 05:41:24.107911 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3cfab6e3_c3a3_4c3a_a475_ea5ba2b324a0.slice/crio-5d9a4de75bdfd3f4d5374ececa9ea808d9bd51653ff10be611cb05820e40fbf0 WatchSource:0}: Error finding container 5d9a4de75bdfd3f4d5374ececa9ea808d9bd51653ff10be611cb05820e40fbf0: Status 404 returned error can't find the container with id 5d9a4de75bdfd3f4d5374ececa9ea808d9bd51653ff10be611cb05820e40fbf0 Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.123000 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6xfbd"] Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.160925 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.660909277 +0000 UTC m=+150.754791118 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.160482 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.250480 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-h8q6s" podStartSLOduration=126.250446468 podStartE2EDuration="2m6.250446468s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:24.247063232 +0000 UTC m=+150.340945073" watchObservedRunningTime="2026-01-31 05:41:24.250446468 +0000 UTC m=+150.344328309" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.262922 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.263130 4712 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.763086033 +0000 UTC m=+150.856967874 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.263227 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.263726 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.7637172 +0000 UTC m=+150.857599041 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.367137 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.367543 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.867521158 +0000 UTC m=+150.961402989 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.416662 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" event={"ID":"a9dd2b5c-56c2-430b-9369-7013692ec42d","Type":"ContainerStarted","Data":"78b44e2e5438ec7235222bf408289ca050346026a045f7999ae616078037a244"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.449806 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-rhq9p" podStartSLOduration=126.449787582 podStartE2EDuration="2m6.449787582s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:24.449031272 +0000 UTC m=+150.542913113" watchObservedRunningTime="2026-01-31 05:41:24.449787582 +0000 UTC m=+150.543669423" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.478749 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.479111 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:24.979098245 +0000 UTC m=+151.072980086 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.481976 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-x49kq" event={"ID":"361fda8f-a29d-4f1c-8356-33d8d94ec967","Type":"ContainerStarted","Data":"bacc6080cf5f1a575a48e6431c15fbf6484e3fa87e71b67127b40e6c23421182"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.485667 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh" event={"ID":"ac898594-275a-4083-9217-886a7f4e8a5d","Type":"ContainerStarted","Data":"a6b17afa329cc3212a543461588e4aa5cac3bc6ecfcb032c65d09a871b87cace"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.523495 4712 generic.go:334] "Generic (PLEG): container finished" podID="d7c4b8ae-585b-4c46-9ed4-e992203bd3d7" containerID="8a26952dd04ce6604f5d313a924f72f4d9f28d7ebf7540fe1e187940cbbb290a" exitCode=0 Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.523684 4712 patch_prober.go:28] interesting pod/downloads-7954f5f757-s5vdj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.523726 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s5vdj" podUID="930eb788-1c1a-41e5-8989-7fbbf25c5da0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.580442 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.580648 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.080612424 +0000 UTC m=+151.174494275 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.580728 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.580800 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.580830 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.580916 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.580946 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.582812 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.08278484 +0000 UTC m=+151.176666681 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.583680 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.592367 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.667908 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-s5vdj" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.667989 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-6ctqs" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668003 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-s5vdj" event={"ID":"930eb788-1c1a-41e5-8989-7fbbf25c5da0","Type":"ContainerStarted","Data":"b1dff86b64d6c6ad095c5356fa818076bc83dbe1cc2de01ce52867947f2c08e4"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668027 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-s5vdj" event={"ID":"930eb788-1c1a-41e5-8989-7fbbf25c5da0","Type":"ContainerStarted","Data":"c7024d880629e507335e9de30deff2a7a41ecb61574f07d1139c36b3f258055d"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668041 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" event={"ID":"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7","Type":"ContainerDied","Data":"8a26952dd04ce6604f5d313a924f72f4d9f28d7ebf7540fe1e187940cbbb290a"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668063 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" event={"ID":"352255e7-59b9-4e55-8a87-92e73729fa80","Type":"ContainerStarted","Data":"7a58b8b04023f2cd93737af1ab88eef0409d7c807fb2f1329f2fa1bb16eb11af"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668075 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf"] Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668091 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd" event={"ID":"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0","Type":"ContainerStarted","Data":"5d9a4de75bdfd3f4d5374ececa9ea808d9bd51653ff10be611cb05820e40fbf0"} Jan 31 05:41:24 crc 
kubenswrapper[4712]: I0131 05:41:24.668101 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" event={"ID":"549f7a4d-8fba-47e2-8b51-bb660fe413b4","Type":"ContainerStarted","Data":"93b49e79bafecf62a18560cc62b8043a1af55f0035e67b1b35c41ac90483358d"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668113 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" event={"ID":"8773f474-ef91-4f45-8461-3a991e6b45ee","Type":"ContainerStarted","Data":"95a3b5744537c7b36c6f2ead1ec89b7944f44687518640120ba3ca7ce525eb6e"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668124 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-4kvm8" event={"ID":"2964b046-c398-4eaa-8f8e-a608af22570e","Type":"ContainerStarted","Data":"fa307c5e86c96faa16d235b5dfb2caf2cb9b927a00ee85a0987aefb5314dc5df"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668134 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4d8k7" event={"ID":"6dce9696-4500-4073-86f7-479ca63279bc","Type":"ContainerStarted","Data":"569c06d877c7abb6e741ef130987b8a965591dabdc9d43b927ae1c3b515e7b12"} Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.668742 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.672299 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.675352 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.682824 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.683284 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.183261012 +0000 UTC m=+151.277142853 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.683643 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.683771 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.686032 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.186025483 +0000 UTC m=+151.279907324 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.790085 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.792349 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.292317964 +0000 UTC m=+151.386199795 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.793507 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.794301 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.294282446 +0000 UTC m=+151.388164287 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.894645 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.894789 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.394753927 +0000 UTC m=+151.488635768 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.903301 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:24 crc kubenswrapper[4712]: E0131 05:41:24.903968 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.403948853 +0000 UTC m=+151.497830694 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.949963 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-31 05:36:23 +0000 UTC, rotation deadline is 2026-11-02 12:27:08.480254301 +0000 UTC Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.949998 4712 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6606h45m43.530259294s for next certificate rotation Jan 31 05:41:24 crc kubenswrapper[4712]: I0131 05:41:24.966211 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.004668 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.005030 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.505014161 +0000 UTC m=+151.598896002 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.009743 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-8qb8j" podStartSLOduration=128.009729032 podStartE2EDuration="2m8.009729032s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:24.985522899 +0000 UTC m=+151.079404740" watchObservedRunningTime="2026-01-31 05:41:25.009729032 +0000 UTC m=+151.103610873" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.018341 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:25 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld Jan 31 05:41:25 crc kubenswrapper[4712]: [+]process-running ok Jan 31 05:41:25 crc kubenswrapper[4712]: healthz check failed Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.018438 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.065323 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-rzdlg" podStartSLOduration=128.06530351 podStartE2EDuration="2m8.06530351s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.040872052 +0000 UTC m=+151.134753893" watchObservedRunningTime="2026-01-31 05:41:25.06530351 +0000 UTC m=+151.159185351" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.097252 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" podStartSLOduration=127.097156429 podStartE2EDuration="2m7.097156429s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.066792618 +0000 UTC m=+151.160674459" watchObservedRunningTime="2026-01-31 05:41:25.097156429 +0000 UTC m=+151.191038280" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.110211 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:25 crc 
kubenswrapper[4712]: E0131 05:41:25.110664 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.610647636 +0000 UTC m=+151.704529477 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.214268 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.214624 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.714607687 +0000 UTC m=+151.808489528 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.279259 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d"] Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.316417 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.316874 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.816857355 +0000 UTC m=+151.910739196 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.352483 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c48hj" podStartSLOduration=128.35245749 podStartE2EDuration="2m8.35245749s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.349226657 +0000 UTC m=+151.443108508" watchObservedRunningTime="2026-01-31 05:41:25.35245749 +0000 UTC m=+151.446339351" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.379400 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7"] Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.409755 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-x49kq" podStartSLOduration=127.409720071 podStartE2EDuration="2m7.409720071s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.402574128 +0000 UTC m=+151.496455989" watchObservedRunningTime="2026-01-31 05:41:25.409720071 +0000 UTC m=+151.503601922" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.418824 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.419075 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.919030361 +0000 UTC m=+152.012912202 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.419285 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.419655 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:25.919640806 +0000 UTC m=+152.013522647 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.495213 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc" podStartSLOduration=127.495196938 podStartE2EDuration="2m7.495196938s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.494607603 +0000 UTC m=+151.588489444" watchObservedRunningTime="2026-01-31 05:41:25.495196938 +0000 UTC m=+151.589078779" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.496395 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-s5vdj" podStartSLOduration=128.496390669 podStartE2EDuration="2m8.496390669s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.444767612 +0000 UTC m=+151.538649453" watchObservedRunningTime="2026-01-31 05:41:25.496390669 +0000 UTC m=+151.590272510" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.520832 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.521069 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" 
failed. No retries permitted until 2026-01-31 05:41:26.021038233 +0000 UTC m=+152.114920074 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.521192 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.521675 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.021664788 +0000 UTC m=+152.115546629 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.569347 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" event={"ID":"549f7a4d-8fba-47e2-8b51-bb660fe413b4","Type":"ContainerStarted","Data":"c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.571528 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.579318 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" event={"ID":"8773f474-ef91-4f45-8461-3a991e6b45ee","Type":"ContainerStarted","Data":"656a8ed03be4a8ccfb69a75418b8d5ebbd5f6a6b44e401166891533f0cee3690"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.587114 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" event={"ID":"d7c4b8ae-585b-4c46-9ed4-e992203bd3d7","Type":"ContainerStarted","Data":"0ed2a0b92dcb9beab5c32e64b43b9199713846a034e62e039450fe8a0c98e07b"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.589829 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.606659 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf" 
event={"ID":"68cae03a-7e79-42ab-b79a-83b774550a8f","Type":"ContainerStarted","Data":"ca26bb0df691fab85e9a7e0552273f7052465cdd5db60582d904f8a5e57ac569"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.606739 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf" event={"ID":"68cae03a-7e79-42ab-b79a-83b774550a8f","Type":"ContainerStarted","Data":"9a0dd0281c8459b0cfa8941628ab2c6da5868047175b42880bcaf57d1ec2aba4"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.607317 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2xf7t" podStartSLOduration=127.607287729 podStartE2EDuration="2m7.607287729s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.607024102 +0000 UTC m=+151.700905943" watchObservedRunningTime="2026-01-31 05:41:25.607287729 +0000 UTC m=+151.701169570" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.611545 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh" event={"ID":"ac898594-275a-4083-9217-886a7f4e8a5d","Type":"ContainerStarted","Data":"350fa47393b85cdf8a2d9306101d6dbb78effdc86ec1f48d4e6c520d3d4f49ab"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.615123 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r" event={"ID":"85b78da7-6c11-44e7-9551-c482fcc56080","Type":"ContainerStarted","Data":"5d5a92c986120e0938416c3fb5fda99b472aeed104d0c3ae1f9f95e3a3ccc1a6"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.615166 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r" event={"ID":"85b78da7-6c11-44e7-9551-c482fcc56080","Type":"ContainerStarted","Data":"627cd0b54524ff4023d229276cdf0943afeb764c58953439cf62a72cbbeb4dbb"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.621276 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd" event={"ID":"3cfab6e3-c3a3-4c3a-a475-ea5ba2b324a0","Type":"ContainerStarted","Data":"5c41f3b093571b74420ca58d7ac06d79d54de8fc1e39affbdf3e928a9e212c5f"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.621538 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.621715 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.121681619 +0000 UTC m=+152.215563460 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.621913 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.622353 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.122343966 +0000 UTC m=+152.216225807 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.623961 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-4kvm8" event={"ID":"2964b046-c398-4eaa-8f8e-a608af22570e","Type":"ContainerStarted","Data":"c69ab812487db1458afc78b2ac76ce586f236aa7c5fcb5ec74755c0c429601fd"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.625901 4712 generic.go:334] "Generic (PLEG): container finished" podID="352255e7-59b9-4e55-8a87-92e73729fa80" containerID="b5adb6ce9ef05ed28b179489235a0f63762dac91a5cbb8ec5a1be98018e9f8a4" exitCode=0 Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.628316 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" event={"ID":"352255e7-59b9-4e55-8a87-92e73729fa80","Type":"ContainerDied","Data":"b5adb6ce9ef05ed28b179489235a0f63762dac91a5cbb8ec5a1be98018e9f8a4"} Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.629240 4712 patch_prober.go:28] interesting pod/downloads-7954f5f757-s5vdj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.629844 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s5vdj" podUID="930eb788-1c1a-41e5-8989-7fbbf25c5da0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.660927 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8" 
podStartSLOduration=128.660906817 podStartE2EDuration="2m8.660906817s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.653220819 +0000 UTC m=+151.747102680" watchObservedRunningTime="2026-01-31 05:41:25.660906817 +0000 UTC m=+151.754788658" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.721748 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rclbd" podStartSLOduration=128.72173109 podStartE2EDuration="2m8.72173109s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.720505739 +0000 UTC m=+151.814387580" watchObservedRunningTime="2026-01-31 05:41:25.72173109 +0000 UTC m=+151.815612931" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.722396 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.722496 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" podStartSLOduration=127.72249029 podStartE2EDuration="2m7.72249029s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.695613549 +0000 UTC m=+151.789495380" watchObservedRunningTime="2026-01-31 05:41:25.72249029 +0000 UTC m=+151.816372131" Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.727553 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.227523759 +0000 UTC m=+152.321405860 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.792390 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-t4pvh" podStartSLOduration=127.792366286 podStartE2EDuration="2m7.792366286s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.790314873 +0000 UTC m=+151.884196724" watchObservedRunningTime="2026-01-31 05:41:25.792366286 +0000 UTC m=+151.886248127" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.823609 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.824161 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.324139232 +0000 UTC m=+152.418021083 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.862357 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-4kvm8" podStartSLOduration=5.862329754 podStartE2EDuration="5.862329754s" podCreationTimestamp="2026-01-31 05:41:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:25.860836165 +0000 UTC m=+151.954718006" watchObservedRunningTime="2026-01-31 05:41:25.862329754 +0000 UTC m=+151.956211595" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.914901 4712 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7mvft container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.914977 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" podUID="549f7a4d-8fba-47e2-8b51-bb660fe413b4" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Jan 31 05:41:25 crc kubenswrapper[4712]: I0131 05:41:25.924692 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:25 crc kubenswrapper[4712]: E0131 05:41:25.924933 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.424916482 +0000 UTC m=+152.518798323 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.017508 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:26 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld Jan 31 05:41:26 crc kubenswrapper[4712]: [+]process-running ok Jan 31 05:41:26 crc kubenswrapper[4712]: healthz check failed Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.018019 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.049377 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.049796 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.549780071 +0000 UTC m=+152.643661912 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.075034 4712 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod352255e7_59b9_4e55_8a87_92e73729fa80.slice/crio-b5adb6ce9ef05ed28b179489235a0f63762dac91a5cbb8ec5a1be98018e9f8a4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod352255e7_59b9_4e55_8a87_92e73729fa80.slice/crio-conmon-b5adb6ce9ef05ed28b179489235a0f63762dac91a5cbb8ec5a1be98018e9f8a4.scope\": RecentStats: unable to find data in memory cache]" Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.154492 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.154900 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.654884802 +0000 UTC m=+152.748766633 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.268462 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.269235 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.76921968 +0000 UTC m=+152.863101521 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.333489 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j"] Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.358350 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-bdt97"] Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.371684 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv"] Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.371613 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.871590421 +0000 UTC m=+152.965472262 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.371527 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.371902 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.372307 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.87230007 +0000 UTC m=+152.966181911 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.435589 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs"] Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.473153 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.474005 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:26.973983613 +0000 UTC m=+153.067865454 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.577083 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.578689 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.078675173 +0000 UTC m=+153.172557014 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.632101 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf" event={"ID":"68cae03a-7e79-42ab-b79a-83b774550a8f","Type":"ContainerStarted","Data":"e7e7f8d51a10c3fb4c4fed59074ba1185349423b59002f3cc0735b4d2be0b2c1"} Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.654228 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" event={"ID":"fe1711a8-f896-4652-925d-d3f25c982517","Type":"ContainerStarted","Data":"3e95b99b3cdac48d48cde7df5a89d3f6fea96b507a8651e9d44644a5d982b7f5"} Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.654278 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" event={"ID":"fe1711a8-f896-4652-925d-d3f25c982517","Type":"ContainerStarted","Data":"4c811799d6e7061d2124c96a757f307ca1e2428d9f7cd45675d591b036a381ee"} Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.656310 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" event={"ID":"7818ef80-fbf1-48f4-8921-84b4d693ae62","Type":"ContainerStarted","Data":"8751bcb2f2304971ae669cc14776d3f56eb1302eaf2b0573e645978f1273ddb3"} Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.656347 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" event={"ID":"7818ef80-fbf1-48f4-8921-84b4d693ae62","Type":"ContainerStarted","Data":"4dad95f84b1508f60e0c95f16081cf94cf217bdd0becbb8e7e69c4809e23e174"} Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.656361 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" event={"ID":"7818ef80-fbf1-48f4-8921-84b4d693ae62","Type":"ContainerStarted","Data":"8c0c8df36412dd4ffa7e6f872f65e578d105f27ddea7d891bce3c8fa0ffbe54c"} Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.656726 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.657798 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" event={"ID":"c6add969-5d7d-4966-bcaf-46cc6e60c3ed","Type":"ContainerStarted","Data":"3a13f177fa1f41ef8f2c5d29cfef0245954776b639e918fd57524c6c7085c5e6"} Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.659117 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" event={"ID":"d63625f4-7c2b-4196-951e-99c5978046e7","Type":"ContainerStarted","Data":"c468a5a433ac4f14a06018f8efccd0bf671e52b37ee793db84e1f7bba55228e0"} Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.671258 4712 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k6mqf" podStartSLOduration=128.671228372 podStartE2EDuration="2m8.671228372s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:26.666047919 +0000 UTC m=+152.759929760" watchObservedRunningTime="2026-01-31 05:41:26.671228372 +0000 UTC m=+152.765110213" Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.678775 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.678929 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.17890409 +0000 UTC m=+153.272785931 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.679281 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.679753 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.17973576 +0000 UTC m=+153.273617601 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.686015    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" event={"ID":"352255e7-59b9-4e55-8a87-92e73729fa80","Type":"ContainerStarted","Data":"9a47ec476b2e2987d36fb9192dd9d5da1a4bd05d6cae9cba0d56e235ef7bab49"}
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.692525    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" event={"ID":"c4e74709-7867-4c48-b77f-9e0e7f441b39","Type":"ContainerStarted","Data":"5ccf0ebc67ad024631901bfdeddfc354e9da90275bb7f90a111b2af7c22f2f56"}
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.693855    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" podStartSLOduration=128.693832493 podStartE2EDuration="2m8.693832493s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:26.693523615 +0000 UTC m=+152.787405456" watchObservedRunningTime="2026-01-31 05:41:26.693832493 +0000 UTC m=+152.787714334"
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.706234    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r" event={"ID":"85b78da7-6c11-44e7-9551-c482fcc56080","Type":"ContainerStarted","Data":"931575c43fd550c19af545796003d61192f62ff56ad217351a76c990a927b888"}
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.714660    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr"]
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.716118    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" event={"ID":"747781a6-dea8-4fed-af24-371ba391d6c0","Type":"ContainerStarted","Data":"fe06a9f1a3b8af4a9716843ab0698f21acc0321df32dae7afcf89cc7148061b2"}
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.740027    4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.741076    4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.741522    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pc46d" podStartSLOduration=128.741510618 podStartE2EDuration="2m8.741510618s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:26.739982929 +0000 UTC m=+152.833864770" watchObservedRunningTime="2026-01-31 05:41:26.741510618 +0000 UTC m=+152.835392449"
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.748926    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg"]
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.755409    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-n5lvh"]
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.776711    4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft"
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.781785    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.783435    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.283419055 +0000 UTC m=+153.377300896 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.802959    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5bc9r" podStartSLOduration=129.802934947 podStartE2EDuration="2m9.802934947s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:26.775350418 +0000 UTC m=+152.869232259" watchObservedRunningTime="2026-01-31 05:41:26.802934947 +0000 UTC m=+152.896816788"
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.806502    4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.826598    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hc54l"]
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.830232    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-ldwpn"]
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.852190    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl"]
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.864271    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q"]
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.905201    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:26 crc kubenswrapper[4712]: E0131 05:41:26.905846    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.405813971 +0000 UTC m=+153.499695822 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.932885    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b"]
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.942346    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr"]
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.944561    4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 31 05:41:26 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld
Jan 31 05:41:26 crc kubenswrapper[4712]: [+]process-running ok
Jan 31 05:41:26 crc kubenswrapper[4712]: healthz check failed
Jan 31 05:41:26 crc kubenswrapper[4712]: I0131 05:41:26.944617    4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.007784    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.008117    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.5081014 +0000 UTC m=+153.601983231 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.009383    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q"]
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.012652    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zkv2d"]
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.021822    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zsktt"]
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.068345    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln"]
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.089430    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xgchj"]
Jan 31 05:41:27 crc kubenswrapper[4712]: W0131 05:41:27.096232    4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-25bbdd55bc2a1da3b06f8764a16823909e89cdf95cc967df1bf83b28a0669070 WatchSource:0}: Error finding container 25bbdd55bc2a1da3b06f8764a16823909e89cdf95cc967df1bf83b28a0669070: Status 404 returned error can't find the container with id 25bbdd55bc2a1da3b06f8764a16823909e89cdf95cc967df1bf83b28a0669070
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.102024    4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9kwv8"]
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.120834    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.121101    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.621086914 +0000 UTC m=+153.714968755 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.222909    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.223122    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.723022013 +0000 UTC m=+153.816903854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.223577    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.223908    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.723893196 +0000 UTC m=+153.817775037 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.324957    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.325195    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.825160748 +0000 UTC m=+153.919042589 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.325244    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.325552    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.825545588 +0000 UTC m=+153.919427429 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.426379    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.426522    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.926488173 +0000 UTC m=+154.020370014 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.427151    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.427462    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:27.927451577 +0000 UTC m=+154.021333418 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.528150    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.528579    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.028539675 +0000 UTC m=+154.122421516 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.529089    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.529615    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.029596682 +0000 UTC m=+154.123478533 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.631223    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.631607    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.131570963 +0000 UTC m=+154.225452804 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.733455    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.734656    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.234638692 +0000 UTC m=+154.328520533 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.831646    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-ldwpn" event={"ID":"1561c218-0068-4279-99f0-b2417059eeed","Type":"ContainerStarted","Data":"17dfb18414e369f665db2e28d91c5d1748199576b7098e7602934cb6ea264df5"}
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.831695    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-ldwpn" event={"ID":"1561c218-0068-4279-99f0-b2417059eeed","Type":"ContainerStarted","Data":"334f3418ae9369c7125a946b6db90c7d00ffcd36b9a02aef1012c21b6dcebd4c"}
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.838687    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.839059    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.339043915 +0000 UTC m=+154.432925756 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.867611    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" event={"ID":"5bb6c725-d007-4aff-9396-1cc16fec2a1a","Type":"ContainerStarted","Data":"586322f7da84a3bb9773c49995875dad3ca93635ad0ad6ac7660463455ee6798"}
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.900504    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" event={"ID":"0fd4c024-c591-4752-bd9e-7bf028811d24","Type":"ContainerStarted","Data":"2f53f2a9d626336cea45c3bbb9648efb1269873ca8bafaecd2795a65fd61c04a"}
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.900574    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" event={"ID":"0fd4c024-c591-4752-bd9e-7bf028811d24","Type":"ContainerStarted","Data":"19b5e4202467af10dc9d593967e36ebb853b1894cb40ad6a6b04d274b86d5455"}
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.902140    4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr"
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.925498    4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 31 05:41:27 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld
Jan 31 05:41:27 crc kubenswrapper[4712]: [+]process-running ok
Jan 31 05:41:27 crc kubenswrapper[4712]: healthz check failed
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.925557    4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.936300    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" event={"ID":"21b71afe-6ea3-40c2-9d8a-d500fc90f7af","Type":"ContainerStarted","Data":"2deb74821de3003b25a76cded6faf8f1f6fc9b6a8d002f500d8d88077fda18aa"}
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.937137    4712 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-zw9hr container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" start-of-body=
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.937210    4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" podUID="0fd4c024-c591-4752-bd9e-7bf028811d24" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused"
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.940132    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:27 crc kubenswrapper[4712]: E0131 05:41:27.941980    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.44196291 +0000 UTC m=+154.535844751 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.944369    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" event={"ID":"661bf4e2-2297-4862-9a82-9af65c1dd20c","Type":"ContainerStarted","Data":"34a65757dbf194fb65995dacfab9fb6d93596149e48ff72f04e9c2a73a0aa4f5"}
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.944429    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" event={"ID":"661bf4e2-2297-4862-9a82-9af65c1dd20c","Type":"ContainerStarted","Data":"c45a4b45534a67da320bd8d3d58505bcdea81f88152c6c36431c6ba87acc79a3"}
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.954798    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" event={"ID":"26a2a101-f807-429c-bd37-cbfc78572fdd","Type":"ContainerStarted","Data":"b7ed3dde2c86e981379a86e20f0e0bfaa8bea0973750e6caeb0d23cc08dae285"}
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.959982    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" podStartSLOduration=129.959966063 podStartE2EDuration="2m9.959966063s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:27.959222453 +0000 UTC m=+154.053104294" watchObservedRunningTime="2026-01-31 05:41:27.959966063 +0000 UTC m=+154.053847904"
Jan 31 05:41:27 crc kubenswrapper[4712]: I0131 05:41:27.960875    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-ldwpn" podStartSLOduration=7.960868275 podStartE2EDuration="7.960868275s" podCreationTimestamp="2026-01-31 05:41:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:27.883408795 +0000 UTC m=+153.977290636" watchObservedRunningTime="2026-01-31 05:41:27.960868275 +0000 UTC m=+154.054750116"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.007594    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" event={"ID":"747781a6-dea8-4fed-af24-371ba391d6c0","Type":"ContainerStarted","Data":"96d160c0541e3bc0ee3c3e6dc573777c02b87f43d6c1bc6c891082bbe4b7e618"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.008375    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" podStartSLOduration=130.008356896 podStartE2EDuration="2m10.008356896s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.007457143 +0000 UTC m=+154.101338984" watchObservedRunningTime="2026-01-31 05:41:28.008356896 +0000 UTC m=+154.102238737"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.020556    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" event={"ID":"728479af-3ccc-4ab2-8f85-e5bc28a6e7d9","Type":"ContainerStarted","Data":"4be71baaf01e4051aa66708b4a150ff74b5a010be24a626ad6c5eb97ce79969c"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.035480    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xgchj" event={"ID":"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881","Type":"ContainerStarted","Data":"031bff7bb6c18b32d4f81b0d3962cdec2faf93a49db31584b9027da3eef85b0a"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.037363    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" event={"ID":"dc0eb91d-cc30-4ef8-aa0b-be90744ba313","Type":"ContainerStarted","Data":"b542b7f4037b069286d476f43c8826fdb94fd91c7734d4a099b7126981459244"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.042995    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.044375    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.544353431 +0000 UTC m=+154.638235272 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.047226    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" event={"ID":"c4e74709-7867-4c48-b77f-9e0e7f441b39","Type":"ContainerStarted","Data":"96867c72dd57a36d95fb1615531c2ac3e3da7cf251bbd1f00649edead7c6d2a3"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.048330    4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.049983    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" event={"ID":"c6add969-5d7d-4966-bcaf-46cc6e60c3ed","Type":"ContainerStarted","Data":"715154b39258ee819f44fb9980453d342f1abdd0d5a76a50a768bafc2c41d1f9"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.051163    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"25bbdd55bc2a1da3b06f8764a16823909e89cdf95cc967df1bf83b28a0669070"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.067078    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-bdt97" podStartSLOduration=130.067050494 podStartE2EDuration="2m10.067050494s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.060963068 +0000 UTC m=+154.154844909" watchObservedRunningTime="2026-01-31 05:41:28.067050494 +0000 UTC m=+154.160932335"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.073332    4712 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-wshvv container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body=
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.073394    4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" podUID="c4e74709-7867-4c48-b77f-9e0e7f441b39" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.108397    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"6126d69492d6a7d964b5f13e886e738e23691de3d40574053f3bf51957f24d95"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.124795    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-swhvs" podStartSLOduration=130.124772728 podStartE2EDuration="2m10.124772728s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.121187966 +0000 UTC m=+154.215069807" watchObservedRunningTime="2026-01-31 05:41:28.124772728 +0000 UTC m=+154.218654569"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.127444    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" event={"ID":"d63625f4-7c2b-4196-951e-99c5978046e7","Type":"ContainerStarted","Data":"efb3eb8201f5342d749d7d2cfd543b9baf74284326ac40544042d6dddc4be7a5"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.146823    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.175341    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.675311146 +0000 UTC m=+154.769192987 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.183807    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" podStartSLOduration=130.183788785 podStartE2EDuration="2m10.183788785s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.18361514 +0000 UTC m=+154.277496971" watchObservedRunningTime="2026-01-31 05:41:28.183788785 +0000 UTC m=+154.277670626"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.210591    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" event={"ID":"0bd1b455-98b1-4cdc-b930-6c7a251f39a7","Type":"ContainerStarted","Data":"08fa922a2499b2b6dc7c8b1ea280fbf47b8d3c3b5e6cd57a3b4f47dda612e332"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.210658    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" event={"ID":"0bd1b455-98b1-4cdc-b930-6c7a251f39a7","Type":"ContainerStarted","Data":"e9fdbc776d1b2a9a9118baadcc9d1f75818bee87eeca2a59616abeff7ade4c86"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.212423    4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.222656    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" event={"ID":"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99","Type":"ContainerStarted","Data":"745a5b0bed7c89cb58e6aa5149cffacce321df7f13ac2b76036b176c8f17098d"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.227699    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"810ef1ef4e77736b8fc11108aecbd9c1bc040350df61de411e486b3c2042f3fa"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.249632    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.249969    4712 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-qls5q container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body=
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.250023    4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" podUID="0bd1b455-98b1-4cdc-b930-6c7a251f39a7" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused"
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.250209    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.750182951 +0000 UTC m=+154.844064792 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.250324    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" event={"ID":"f8f21b29-8318-4378-98cc-a7af9ee0b36e","Type":"ContainerStarted","Data":"ec1a7616bfff33904e7c3d48f2ed2614ce65470864e0a36496001817065a383b"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.275494    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" event={"ID":"b7673fef-02b4-4d38-bc24-998bd8ee4434","Type":"ContainerStarted","Data":"3b0e9804219f00b1cf3f9781a84ec15e2d798c9cf9442ece635869aeea1a1187"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.275566    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" event={"ID":"b7673fef-02b4-4d38-bc24-998bd8ee4434","Type":"ContainerStarted","Data":"7589763c296512e85dd101febd3438f82c31fb07f02381035db98a5941ac9b75"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.289074    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" event={"ID":"7dad1b83-c7ab-4bc8-862b-78668f39bf8d","Type":"ContainerStarted","Data":"fe61bc8d1985978dc6120cc94810b8fa4db6b17c3008e64e6b03ba83ad1c24a6"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.322001    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" event={"ID":"352255e7-59b9-4e55-8a87-92e73729fa80","Type":"ContainerStarted","Data":"b6efa650843ab169563406e948b5440d0b4408a2e9616e52e9ff3c08433fd871"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.343803    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" event={"ID":"4a308f02-3cb6-4226-85a1-4a82e2289551","Type":"ContainerStarted","Data":"87a397305d75cbf0f1178eefc7216902a614ef49c256de770c394ecfb8029626"}
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.343866    4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.353381    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.354825    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.85480929 +0000 UTC m=+154.948691131 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.355052    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" podStartSLOduration=130.355028216 podStartE2EDuration="2m10.355028216s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.34858568 +0000 UTC m=+154.442467511" watchObservedRunningTime="2026-01-31 05:41:28.355028216 +0000 UTC m=+154.448910047"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.358897    4712 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-zsktt container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body=
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.358944    4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.368534    4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-klknc"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.397739    4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.398234    4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.429897    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" podStartSLOduration=131.429865279 podStartE2EDuration="2m11.429865279s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.421653017 +0000 UTC m=+154.515534858" watchObservedRunningTime="2026-01-31 05:41:28.429865279 +0000 UTC m=+154.523747120"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.454826    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.455903    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:28.955869017 +0000 UTC m=+155.049750858 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.530225    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" podStartSLOduration=131.530209078 podStartE2EDuration="2m11.530209078s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.459065449 +0000 UTC m=+154.552947300" watchObservedRunningTime="2026-01-31 05:41:28.530209078 +0000 UTC m=+154.624090919"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.532113    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" podStartSLOduration=130.532105026 podStartE2EDuration="2m10.532105026s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.529690115 +0000 UTC m=+154.623571956" watchObservedRunningTime="2026-01-31 05:41:28.532105026 +0000 UTC m=+154.625986867"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.558455    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.559161    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.059141122 +0000 UTC m=+155.153022963 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.599682    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-n5lvh" podStartSLOduration=130.599664423 podStartE2EDuration="2m10.599664423s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.598661867 +0000 UTC m=+154.692543708" watchObservedRunningTime="2026-01-31 05:41:28.599664423 +0000 UTC m=+154.693546264"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.600729    4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" podStartSLOduration=131.60072402 podStartE2EDuration="2m11.60072402s" podCreationTimestamp="2026-01-31 05:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:28.566213293 +0000 UTC m=+154.660095134" watchObservedRunningTime="2026-01-31 05:41:28.60072402 +0000 UTC m=+154.694605861"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.660656    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.661085    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.16105897 +0000 UTC m=+155.254940811 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.765455    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.766029    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.266011637 +0000 UTC m=+155.359893478 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.833860    4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-lmtl8"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.869368    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.870947    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.370899813 +0000 UTC m=+155.464781654 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.907262    4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 31 05:41:28 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld
Jan 31 05:41:28 crc kubenswrapper[4712]: [+]process-running ok
Jan 31 05:41:28 crc kubenswrapper[4712]: healthz check failed
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.907347    4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 31 05:41:28 crc kubenswrapper[4712]: I0131 05:41:28.971235    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:28 crc kubenswrapper[4712]: E0131 05:41:28.971695    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.471673313 +0000 UTC m=+155.565555154 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.073327    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.073925    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.57389405 +0000 UTC m=+155.667775891 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.175642    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.176198    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.676156728 +0000 UTC m=+155.770038569 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.277298    4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.277570    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.777528524 +0000 UTC m=+155.871410355 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.278161    4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.278617    4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.778605651 +0000 UTC m=+155.872487492 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.350650    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" event={"ID":"dc0eb91d-cc30-4ef8-aa0b-be90744ba313","Type":"ContainerStarted","Data":"be87612bf7b1cda0641162dd0fe81bc87fa6ebb3272e364f12a471322d0c88d7"}
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.353788    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jvq7b" event={"ID":"f8f21b29-8318-4378-98cc-a7af9ee0b36e","Type":"ContainerStarted","Data":"20899c8fcf8037609ee3f8a77258bcf85f44dcb59e7c2af8f98b2333cc96bd9a"}
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.356138    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2ccc0fdc24e28fb68cc235ad2c630f34c206746bb1b7dd4e59a4d6763fe63d27"}
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.359153    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" event={"ID":"26a2a101-f807-429c-bd37-cbfc78572fdd","Type":"ContainerStarted","Data":"a3e45ccd8a56c59cfc530ccba0011603c179187e343ee0163b99bced802f4b44"}
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.361248    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xgchj" event={"ID":"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881","Type":"ContainerStarted","Data":"86e64fbbfe2b4ec0bd2079989dd5eb38b31f7e3a1589e5bce8574e71747d1888"}
Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.367399    4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xhtln" event={"ID":"21b71afe-6ea3-40c2-9d8a-d500fc90f7af","Type":"ContainerStarted","Data":"963be601858d3d9e3ff141347d74478b3275c73faeffe70497931d8bbe6193f2"} Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.369481 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" event={"ID":"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99","Type":"ContainerStarted","Data":"81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7"} Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.369830 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.371258 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"cd6c69adf7d1bfa54614a81dda1ac5ffe0f7d6e4da6d929144f45d0e0a6e0328"} Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.371964 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.372281 4712 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-zkv2d container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.372327 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.375124 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" event={"ID":"661bf4e2-2297-4862-9a82-9af65c1dd20c","Type":"ContainerStarted","Data":"9eac886fdd29715ecc12b90f84c64d47cceb7246b3b9481edb41a4e42423a4ee"} Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.376763 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" event={"ID":"728479af-3ccc-4ab2-8f85-e5bc28a6e7d9","Type":"ContainerStarted","Data":"ffdc053f2a27ea2c106f6182dc97daae82206580ec7c1ed64d8d2879df64d3e4"} Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.378137 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" event={"ID":"4a308f02-3cb6-4226-85a1-4a82e2289551","Type":"ContainerStarted","Data":"b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55"} Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.378612 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.378831 
4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.878790716 +0000 UTC m=+155.972672557 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.378882 4712 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-zsktt container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body= Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.378954 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.379147 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.380286 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.880269404 +0000 UTC m=+155.974151245 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.381371 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"32cc9c8320942fe91e075856ac14426a8aa004a12922efa96b472725fbe46092"} Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.391650 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" event={"ID":"7dad1b83-c7ab-4bc8-862b-78668f39bf8d","Type":"ContainerStarted","Data":"298df98efa2779a45cfad38893554bf27b23429e13d7d7a2182c78473ea2ce69"} Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.400096 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" event={"ID":"d63625f4-7c2b-4196-951e-99c5978046e7","Type":"ContainerStarted","Data":"d48ed4e94b17ce5fa0e2d19967bb7cab8cae5d6fe72597d10e8144d989f6bea2"} Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.402318 4712 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-qls5q container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.402393 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" podUID="0bd1b455-98b1-4cdc-b930-6c7a251f39a7" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.406342 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qdzvl" podStartSLOduration=131.406321814 podStartE2EDuration="2m11.406321814s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:29.391524583 +0000 UTC m=+155.485406424" watchObservedRunningTime="2026-01-31 05:41:29.406321814 +0000 UTC m=+155.500203655" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.413157 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wshvv" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.441810 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" podStartSLOduration=131.441787325 podStartE2EDuration="2m11.441787325s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 
05:41:29.425597348 +0000 UTC m=+155.519479189" watchObservedRunningTime="2026-01-31 05:41:29.441787325 +0000 UTC m=+155.535669166" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.482470 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.483562 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:29.983532088 +0000 UTC m=+156.077413929 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.512902 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.527729 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2kqpg" podStartSLOduration=131.527705433 podStartE2EDuration="2m11.527705433s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:29.472301729 +0000 UTC m=+155.566183570" watchObservedRunningTime="2026-01-31 05:41:29.527705433 +0000 UTC m=+155.621587274" Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.527892 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.027872517 +0000 UTC m=+156.121754358 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.616317 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.616841 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.116812983 +0000 UTC m=+156.210694824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.629939 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-24xjr" podStartSLOduration=131.629918359 podStartE2EDuration="2m11.629918359s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:29.579572056 +0000 UTC m=+155.673453897" watchObservedRunningTime="2026-01-31 05:41:29.629918359 +0000 UTC m=+155.723800201" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.672256 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lcb4j" podStartSLOduration=131.672236548 podStartE2EDuration="2m11.672236548s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:29.669377604 +0000 UTC m=+155.763259445" watchObservedRunningTime="2026-01-31 05:41:29.672236548 +0000 UTC m=+155.766118389" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.719925 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.720603 4712 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.22058448 +0000 UTC m=+156.314466321 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.820668 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.820858 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.320821976 +0000 UTC m=+156.414703817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.821429 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.821763 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.321745889 +0000 UTC m=+156.415627730 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.899747 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:29 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld Jan 31 05:41:29 crc kubenswrapper[4712]: [+]process-running ok Jan 31 05:41:29 crc kubenswrapper[4712]: healthz check failed Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.899816 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.922765 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.922968 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.422935651 +0000 UTC m=+156.516817492 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:29 crc kubenswrapper[4712]: I0131 05:41:29.923047 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:29 crc kubenswrapper[4712]: E0131 05:41:29.923487 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.423469844 +0000 UTC m=+156.517351685 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.024487 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.024766 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.524725076 +0000 UTC m=+156.618606917 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.126760 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.127314 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.627284033 +0000 UTC m=+156.721165874 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.152077 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zw9hr" Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.227981 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.228444 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.728413801 +0000 UTC m=+156.822295642 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.329947 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.330509 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.830485825 +0000 UTC m=+156.924367666 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.413946 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" event={"ID":"728479af-3ccc-4ab2-8f85-e5bc28a6e7d9","Type":"ContainerStarted","Data":"408373913e216073ca1fc198c051c85040ffb823acb55d4a6347267c83bcefc2"} Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.415643 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" event={"ID":"5bb6c725-d007-4aff-9396-1cc16fec2a1a","Type":"ContainerStarted","Data":"d13912d340410a7d14335481f1f3615ccd8bc919196517b219acbd6e0a7ab552"} Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.417488 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xgchj" event={"ID":"11d0bb16-cbd3-4f4e-8b48-9f5676ec5881","Type":"ContainerStarted","Data":"d758b7afa8c4e32899a1487f4ad82b8164f42a32246b78c116bb614038882ea4"} Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.418752 4712 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-zkv2d container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.418859 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.431250 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.431431 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.931399108 +0000 UTC m=+157.025280949 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.431729 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.432340 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:30.932316321 +0000 UTC m=+157.026198152 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.480577 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qls5q" Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.529360 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-hc54l" podStartSLOduration=132.529340295 podStartE2EDuration="2m12.529340295s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:30.526871011 +0000 UTC m=+156.620752852" watchObservedRunningTime="2026-01-31 05:41:30.529340295 +0000 UTC m=+156.623222126" Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.533543 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.533814 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.033767619 +0000 UTC m=+157.127649460 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.534130 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.537577 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.037548766 +0000 UTC m=+157.131430597 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.643888 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.644283 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.144263959 +0000 UTC m=+157.238145800 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.752441 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.252419368 +0000 UTC m=+157.346301209 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.763720 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.867962 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.868378 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.368358788 +0000 UTC m=+157.462240629 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.899602 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:30 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld Jan 31 05:41:30 crc kubenswrapper[4712]: [+]process-running ok Jan 31 05:41:30 crc kubenswrapper[4712]: healthz check failed Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.899661 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:30 crc kubenswrapper[4712]: I0131 05:41:30.972131 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:30 crc kubenswrapper[4712]: E0131 05:41:30.972475 4712 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.472463323 +0000 UTC m=+157.566345154 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.073715 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.074116 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.574102256 +0000 UTC m=+157.667984087 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.175676 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.176049 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.676033805 +0000 UTC m=+157.769915646 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.283840 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.284135 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.784118523 +0000 UTC m=+157.878000364 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.289367 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.318600 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-xgchj" podStartSLOduration=11.318582928 podStartE2EDuration="11.318582928s" podCreationTimestamp="2026-01-31 05:41:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:30.867481995 +0000 UTC m=+156.961363846" watchObservedRunningTime="2026-01-31 05:41:31.318582928 +0000 UTC m=+157.412464769" Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.330746 4712 patch_prober.go:28] interesting pod/apiserver-76f77b778f-6xfbd container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 31 05:41:31 crc kubenswrapper[4712]: [+]log ok Jan 31 05:41:31 crc kubenswrapper[4712]: [+]etcd ok Jan 31 05:41:31 crc kubenswrapper[4712]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 31 05:41:31 crc kubenswrapper[4712]: [+]poststarthook/generic-apiserver-start-informers ok Jan 31 05:41:31 crc kubenswrapper[4712]: [+]poststarthook/max-in-flight-filter ok Jan 31 05:41:31 crc kubenswrapper[4712]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 31 05:41:31 crc kubenswrapper[4712]: [+]poststarthook/image.openshift.io-apiserver-caches ok Jan 31 05:41:31 crc kubenswrapper[4712]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Jan 31 05:41:31 crc kubenswrapper[4712]: 
[-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Jan 31 05:41:31 crc kubenswrapper[4712]: [+]poststarthook/project.openshift.io-projectcache ok Jan 31 05:41:31 crc kubenswrapper[4712]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Jan 31 05:41:31 crc kubenswrapper[4712]: [-]poststarthook/openshift.io-startinformers failed: reason withheld Jan 31 05:41:31 crc kubenswrapper[4712]: [+]poststarthook/openshift.io-restmapperupdater ok Jan 31 05:41:31 crc kubenswrapper[4712]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 31 05:41:31 crc kubenswrapper[4712]: livez check failed Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.330827 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd" podUID="352255e7-59b9-4e55-8a87-92e73729fa80" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.385333 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.385679 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.885665902 +0000 UTC m=+157.979547743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.430700 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" event={"ID":"5bb6c725-d007-4aff-9396-1cc16fec2a1a","Type":"ContainerStarted","Data":"46b570edbffb43b2f0d4463836bd29bcc9748fa280f244042a739997bb22f16b"} Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.431139 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-xgchj" Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.486060 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.486425 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:31.986406641 +0000 UTC m=+158.080288492 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.590032 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.590530 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.090501907 +0000 UTC m=+158.184383938 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.691393 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.691697 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.191663267 +0000 UTC m=+158.285545108 (durationBeforeRetry 500ms). 
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.692146 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.692554 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.19253753 +0000 UTC m=+158.286419371 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.793576 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.793845 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.293802452 +0000 UTC m=+158.387684293 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.793908 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.794373 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.294365876 +0000 UTC m=+158.388247717 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.819493 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dpr8n"]
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.820642 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dpr8n"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.825533 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.834558 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dpr8n"]
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.894574 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.895005 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.394973282 +0000 UTC m=+158.488855133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.899656 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 31 05:41:31 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld
Jan 31 05:41:31 crc kubenswrapper[4712]: [+]process-running ok
Jan 31 05:41:31 crc kubenswrapper[4712]: healthz check failed
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.899858 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.909668 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.910465 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.914873 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.915036 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.927528 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.996864 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-catalog-content\") pod \"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.996908 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-utilities\") pod \"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.996943 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.996972 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7c2b70c-170d-425a-8281-2ff0b950ff29-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a7c2b70c-170d-425a-8281-2ff0b950ff29\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.997114 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7c2b70c-170d-425a-8281-2ff0b950ff29-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a7c2b70c-170d-425a-8281-2ff0b950ff29\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 31 05:41:31 crc kubenswrapper[4712]: I0131 05:41:31.997316 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg7z7\" (UniqueName: \"kubernetes.io/projected/b4b6ff77-738a-480a-b29c-30a4a0d42182-kube-api-access-qg7z7\") pod \"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n"
Jan 31 05:41:31 crc kubenswrapper[4712]: E0131 05:41:31.997356 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.497339322 +0000 UTC m=+158.591221163 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.017093 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-njk9h"]
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.020305 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:32 crc kubenswrapper[4712]: W0131 05:41:32.025262 4712 reflector.go:561] object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g": failed to list *v1.Secret: secrets "certified-operators-dockercfg-4rs5g" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object
Jan 31 05:41:32 crc kubenswrapper[4712]: E0131 05:41:32.025332 4712 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"certified-operators-dockercfg-4rs5g\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"certified-operators-dockercfg-4rs5g\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.080871 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-njk9h"]
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.099129 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:32 crc kubenswrapper[4712]: E0131 05:41:32.099307 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.599279852 +0000 UTC m=+158.693161693 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.099443 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg7z7\" (UniqueName: \"kubernetes.io/projected/b4b6ff77-738a-480a-b29c-30a4a0d42182-kube-api-access-qg7z7\") pod \"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.099584 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-catalog-content\") pod \"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.099615 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-utilities\") pod \"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.099650 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.099678 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7c2b70c-170d-425a-8281-2ff0b950ff29-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a7c2b70c-170d-425a-8281-2ff0b950ff29\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.099704 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7c2b70c-170d-425a-8281-2ff0b950ff29-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a7c2b70c-170d-425a-8281-2ff0b950ff29\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.099856 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7c2b70c-170d-425a-8281-2ff0b950ff29-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a7c2b70c-170d-425a-8281-2ff0b950ff29\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.100103 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-catalog-content\") pod \"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n"
\"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.100342 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-utilities\") pod \"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:41:32 crc kubenswrapper[4712]: E0131 05:41:32.100503 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.600484123 +0000 UTC m=+158.694365964 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.119766 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg7z7\" (UniqueName: \"kubernetes.io/projected/b4b6ff77-738a-480a-b29c-30a4a0d42182-kube-api-access-qg7z7\") pod \"community-operators-dpr8n\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.123455 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7c2b70c-170d-425a-8281-2ff0b950ff29-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a7c2b70c-170d-425a-8281-2ff0b950ff29\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.210807 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4mdhz"] Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.211952 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.225287 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.228331 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.229059 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-catalog-content\") pod \"certified-operators-njk9h\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " pod="openshift-marketplace/certified-operators-njk9h" Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.229206 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncfzm\" (UniqueName: \"kubernetes.io/projected/2c321fbf-244e-403c-a1c0-18b136eb9995-kube-api-access-ncfzm\") pod \"community-operators-4mdhz\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.229306 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-utilities\") pod \"community-operators-4mdhz\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.229379 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-catalog-content\") pod \"community-operators-4mdhz\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:41:32 crc kubenswrapper[4712]: E0131 05:41:32.229494 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.729477258 +0000 UTC m=+158.823359089 (durationBeforeRetry 500ms). 
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.229576 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbb49\" (UniqueName: \"kubernetes.io/projected/1fd9f25f-4108-411d-a106-3d94f4afe2bf-kube-api-access-dbb49\") pod \"certified-operators-njk9h\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.229660 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-utilities\") pod \"certified-operators-njk9h\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.232504 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4mdhz"]
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.238078 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.338858 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-catalog-content\") pod \"certified-operators-njk9h\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.339243 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.339265 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncfzm\" (UniqueName: \"kubernetes.io/projected/2c321fbf-244e-403c-a1c0-18b136eb9995-kube-api-access-ncfzm\") pod \"community-operators-4mdhz\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " pod="openshift-marketplace/community-operators-4mdhz"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.339300 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-catalog-content\") pod \"certified-operators-njk9h\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:32 crc kubenswrapper[4712]: E0131 05:41:32.339527 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.839514527 +0000 UTC m=+158.933396368 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.339582 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-utilities\") pod \"community-operators-4mdhz\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " pod="openshift-marketplace/community-operators-4mdhz"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.339303 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-utilities\") pod \"community-operators-4mdhz\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " pod="openshift-marketplace/community-operators-4mdhz"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.339619 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-catalog-content\") pod \"community-operators-4mdhz\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " pod="openshift-marketplace/community-operators-4mdhz"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.339642 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbb49\" (UniqueName: \"kubernetes.io/projected/1fd9f25f-4108-411d-a106-3d94f4afe2bf-kube-api-access-dbb49\") pod \"certified-operators-njk9h\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.339665 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-utilities\") pod \"certified-operators-njk9h\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.339885 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-utilities\") pod \"certified-operators-njk9h\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.340081 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-catalog-content\") pod \"community-operators-4mdhz\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " pod="openshift-marketplace/community-operators-4mdhz"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.341358 4712 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.360552 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncfzm\" (UniqueName: \"kubernetes.io/projected/2c321fbf-244e-403c-a1c0-18b136eb9995-kube-api-access-ncfzm\") pod \"community-operators-4mdhz\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " pod="openshift-marketplace/community-operators-4mdhz"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.366166 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbb49\" (UniqueName: \"kubernetes.io/projected/1fd9f25f-4108-411d-a106-3d94f4afe2bf-kube-api-access-dbb49\") pod \"certified-operators-njk9h\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.434634 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vqrgc"]
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.442485 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:32 crc kubenswrapper[4712]: E0131 05:41:32.442916 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:32.942895153 +0000 UTC m=+159.036776994 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.462977 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vqrgc"]
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.463168 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.476928 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" event={"ID":"7dad1b83-c7ab-4bc8-862b-78668f39bf8d","Type":"ContainerDied","Data":"298df98efa2779a45cfad38893554bf27b23429e13d7d7a2182c78473ea2ce69"}
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.471589 4712 generic.go:334] "Generic (PLEG): container finished" podID="7dad1b83-c7ab-4bc8-862b-78668f39bf8d" containerID="298df98efa2779a45cfad38893554bf27b23429e13d7d7a2182c78473ea2ce69" exitCode=0
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.495266 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" event={"ID":"5bb6c725-d007-4aff-9396-1cc16fec2a1a","Type":"ContainerStarted","Data":"4d9beb5a1dc1ad059b1b40e1211d16a61c46227b99266a1cf073e3e23c96cfa2"}
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.495304 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" event={"ID":"5bb6c725-d007-4aff-9396-1cc16fec2a1a","Type":"ContainerStarted","Data":"dd8da095f2af5c6a06afa470251eeee084cd319b5fe9b2efde9a36a1c267f66c"}
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.528766 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4mdhz"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.544309 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.544364 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-catalog-content\") pod \"certified-operators-vqrgc\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.544434 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-utilities\") pod \"certified-operators-vqrgc\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.544480 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55xg9\" (UniqueName: \"kubernetes.io/projected/2fdbdcd7-a42e-418f-aa9e-81599576fac5-kube-api-access-55xg9\") pod \"certified-operators-vqrgc\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: E0131 05:41:32.544764 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:33.044751491 +0000 UTC m=+159.138633332 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.645926 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.646464 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-utilities\") pod \"certified-operators-vqrgc\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.646540 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55xg9\" (UniqueName: \"kubernetes.io/projected/2fdbdcd7-a42e-418f-aa9e-81599576fac5-kube-api-access-55xg9\") pod \"certified-operators-vqrgc\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.646582 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-catalog-content\") pod \"certified-operators-vqrgc\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.647361 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-catalog-content\") pod \"certified-operators-vqrgc\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: E0131 05:41:32.647812 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-31 05:41:33.147786879 +0000 UTC m=+159.241668720 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.648314 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-utilities\") pod \"certified-operators-vqrgc\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.669132 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55xg9\" (UniqueName: \"kubernetes.io/projected/2fdbdcd7-a42e-418f-aa9e-81599576fac5-kube-api-access-55xg9\") pod \"certified-operators-vqrgc\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.677134 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-9kwv8" podStartSLOduration=12.677116733 podStartE2EDuration="12.677116733s" podCreationTimestamp="2026-01-31 05:41:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:32.539735962 +0000 UTC m=+158.633617803" watchObservedRunningTime="2026-01-31 05:41:32.677116733 +0000 UTC m=+158.770998574"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.680029 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dpr8n"]
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.723146 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.723929 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-8qb8j"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.726312 4712 patch_prober.go:28] interesting pod/console-f9d7485db-8qb8j container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.30:8443/health\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body=
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.726393 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8qb8j" podUID="b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" containerName="console" probeResult="failure" output="Get \"https://10.217.0.30:8443/health\": dial tcp 10.217.0.30:8443: connect: connection refused"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.747653 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:32 crc kubenswrapper[4712]: E0131 05:41:32.748850 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-31 05:41:33.248831966 +0000 UTC m=+159.342713807 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nlhkt" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.805816 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.830707 4712 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-31T05:41:32.341372184Z","Handler":null,"Name":""}
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.836808 4712 patch_prober.go:28] interesting pod/downloads-7954f5f757-s5vdj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.836884 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-s5vdj" podUID="930eb788-1c1a-41e5-8989-7fbbf25c5da0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.836913 4712 patch_prober.go:28] interesting pod/downloads-7954f5f757-s5vdj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.837090 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s5vdj" podUID="930eb788-1c1a-41e5-8989-7fbbf25c5da0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.837355 4712 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.837388 4712 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.849201 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.860065 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.876331 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4mdhz"]
Jan 31 05:41:32 crc kubenswrapper[4712]: W0131 05:41:32.883209 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c321fbf_244e_403c_a1c0_18b136eb9995.slice/crio-54559a754a9bab540d3e8ce1074cea7ecf53ba932f551af13be10d67fb951d7f WatchSource:0}: Error finding container 54559a754a9bab540d3e8ce1074cea7ecf53ba932f551af13be10d67fb951d7f: Status 404 returned error can't find the container with id 54559a754a9bab540d3e8ce1074cea7ecf53ba932f551af13be10d67fb951d7f
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.890416 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-rhq9p"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.894875 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 31 05:41:32 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld
Jan 31 05:41:32 crc kubenswrapper[4712]: [+]process-running ok
Jan 31 05:41:32 crc kubenswrapper[4712]: healthz check failed
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.894979 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.950851 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.955029 4712 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.955082 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.982385 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nlhkt\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:32 crc kubenswrapper[4712]: I0131 05:41:32.999071 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt"
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.282495 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nlhkt"]
Jan 31 05:41:33 crc kubenswrapper[4712]: W0131 05:41:33.294149 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfecbebbb_c701_40e7_9755_5ff54d25523d.slice/crio-e034e259ee43464adfbf05cfdce3a880d7ddd0f5965b083bdd3917482da21652 WatchSource:0}: Error finding container e034e259ee43464adfbf05cfdce3a880d7ddd0f5965b083bdd3917482da21652: Status 404 returned error can't find the container with id e034e259ee43464adfbf05cfdce3a880d7ddd0f5965b083bdd3917482da21652
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.303654 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.307904 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-njk9h"
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.311006 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vqrgc"
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.405637 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.416037 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-6xfbd"
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.585029 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a7c2b70c-170d-425a-8281-2ff0b950ff29","Type":"ContainerStarted","Data":"54a85b5389677cbc3d6e72a87541eea68cbe2103a3faec656ea42881bbcfa7a3"}
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.585098 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a7c2b70c-170d-425a-8281-2ff0b950ff29","Type":"ContainerStarted","Data":"3f0f133154f16d9d7e092328014db0975323c71828a198fd27b5429b083f87c6"}
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.618572 4712 generic.go:334] "Generic (PLEG): container finished" podID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerID="2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d" exitCode=0
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.619034 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mdhz" event={"ID":"2c321fbf-244e-403c-a1c0-18b136eb9995","Type":"ContainerDied","Data":"2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d"}
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.619126 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mdhz" event={"ID":"2c321fbf-244e-403c-a1c0-18b136eb9995","Type":"ContainerStarted","Data":"54559a754a9bab540d3e8ce1074cea7ecf53ba932f551af13be10d67fb951d7f"}
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.624413 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.635595 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d"
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.646227 4712 generic.go:334] "Generic (PLEG): container finished" podID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerID="3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3" exitCode=0
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.646803 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpr8n" event={"ID":"b4b6ff77-738a-480a-b29c-30a4a0d42182","Type":"ContainerDied","Data":"3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3"}
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.647605 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpr8n" event={"ID":"b4b6ff77-738a-480a-b29c-30a4a0d42182","Type":"ContainerStarted","Data":"92715f8aef6b9af3aa96bfc0d5fa2b72ab536c01ed08c64f054bb7b5f3fc7fb9"}
Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.674769 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.674743391 podStartE2EDuration="2.674743391s" podCreationTimestamp="2026-01-31 05:41:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:33.663957285 +0000 UTC m=+159.757839126" watchObservedRunningTime="2026-01-31 05:41:33.674743391 +0000 UTC m=+159.768625232"
podStartE2EDuration="2.674743391s" podCreationTimestamp="2026-01-31 05:41:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:33.663957285 +0000 UTC m=+159.757839126" watchObservedRunningTime="2026-01-31 05:41:33.674743391 +0000 UTC m=+159.768625232" Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.687364 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" event={"ID":"fecbebbb-c701-40e7-9755-5ff54d25523d","Type":"ContainerStarted","Data":"e034e259ee43464adfbf05cfdce3a880d7ddd0f5965b083bdd3917482da21652"} Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.688397 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.836427 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cpm9t"] Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.837917 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.845720 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.853079 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpm9t"] Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.900659 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:33 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld Jan 31 05:41:33 crc kubenswrapper[4712]: [+]process-running ok Jan 31 05:41:33 crc kubenswrapper[4712]: healthz check failed Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.900715 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.909211 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vqrgc"] Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.909312 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" podStartSLOduration=135.90929842 podStartE2EDuration="2m15.90929842s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:41:33.902193217 +0000 UTC m=+159.996075058" watchObservedRunningTime="2026-01-31 05:41:33.90929842 +0000 UTC m=+160.003180261" Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.965037 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-njk9h"] Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.977222 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-utilities\") pod \"redhat-marketplace-cpm9t\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.977317 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkl9v\" (UniqueName: \"kubernetes.io/projected/86d1e607-e026-4540-a6b0-fab85244efd3-kube-api-access-dkl9v\") pod \"redhat-marketplace-cpm9t\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:33 crc kubenswrapper[4712]: I0131 05:41:33.977367 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-catalog-content\") pod \"redhat-marketplace-cpm9t\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.081681 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-utilities\") pod \"redhat-marketplace-cpm9t\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.081782 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkl9v\" (UniqueName: \"kubernetes.io/projected/86d1e607-e026-4540-a6b0-fab85244efd3-kube-api-access-dkl9v\") pod \"redhat-marketplace-cpm9t\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.081814 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-catalog-content\") pod \"redhat-marketplace-cpm9t\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.082623 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-catalog-content\") pod \"redhat-marketplace-cpm9t\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.082910 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-utilities\") pod \"redhat-marketplace-cpm9t\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.108309 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkl9v\" (UniqueName: \"kubernetes.io/projected/86d1e607-e026-4540-a6b0-fab85244efd3-kube-api-access-dkl9v\") pod \"redhat-marketplace-cpm9t\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.163561 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.209164 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qtnz9"] Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.210393 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.214628 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.224743 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtnz9"] Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.388089 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5bl9\" (UniqueName: \"kubernetes.io/projected/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-kube-api-access-s5bl9\") pod \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.388226 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-config-volume\") pod \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.388355 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-secret-volume\") pod \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\" (UID: \"7dad1b83-c7ab-4bc8-862b-78668f39bf8d\") " Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.388532 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-utilities\") pod \"redhat-marketplace-qtnz9\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.388620 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz2cb\" (UniqueName: \"kubernetes.io/projected/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-kube-api-access-wz2cb\") pod \"redhat-marketplace-qtnz9\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.388684 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-catalog-content\") pod \"redhat-marketplace-qtnz9\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.391560 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-config-volume" (OuterVolumeSpecName: "config-volume") pod "7dad1b83-c7ab-4bc8-862b-78668f39bf8d" (UID: "7dad1b83-c7ab-4bc8-862b-78668f39bf8d"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.399223 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7dad1b83-c7ab-4bc8-862b-78668f39bf8d" (UID: "7dad1b83-c7ab-4bc8-862b-78668f39bf8d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.399347 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-kube-api-access-s5bl9" (OuterVolumeSpecName: "kube-api-access-s5bl9") pod "7dad1b83-c7ab-4bc8-862b-78668f39bf8d" (UID: "7dad1b83-c7ab-4bc8-862b-78668f39bf8d"). InnerVolumeSpecName "kube-api-access-s5bl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.489503 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-catalog-content\") pod \"redhat-marketplace-qtnz9\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.490021 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-utilities\") pod \"redhat-marketplace-qtnz9\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.490101 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz2cb\" (UniqueName: \"kubernetes.io/projected/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-kube-api-access-wz2cb\") pod \"redhat-marketplace-qtnz9\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.490201 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5bl9\" (UniqueName: \"kubernetes.io/projected/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-kube-api-access-s5bl9\") on node \"crc\" DevicePath \"\"" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.490214 4712 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-config-volume\") on node \"crc\" DevicePath \"\"" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.490224 4712 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7dad1b83-c7ab-4bc8-862b-78668f39bf8d-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.493260 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-catalog-content\") pod \"redhat-marketplace-qtnz9\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.493329 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-utilities\") pod \"redhat-marketplace-qtnz9\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.508549 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz2cb\" (UniqueName: \"kubernetes.io/projected/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-kube-api-access-wz2cb\") pod \"redhat-marketplace-qtnz9\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.516089 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.549626 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.705635 4712 generic.go:334] "Generic (PLEG): container finished" podID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerID="e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8" exitCode=0 Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.705714 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-njk9h" event={"ID":"1fd9f25f-4108-411d-a106-3d94f4afe2bf","Type":"ContainerDied","Data":"e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8"} Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.705767 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-njk9h" event={"ID":"1fd9f25f-4108-411d-a106-3d94f4afe2bf","Type":"ContainerStarted","Data":"1fc7e7c03d1d5269d450b0702d103c61ecfc1c4f9cebfccbf7b280c310921154"} Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.731926 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpm9t"] Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.737660 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" event={"ID":"fecbebbb-c701-40e7-9755-5ff54d25523d","Type":"ContainerStarted","Data":"66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70"} Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.762293 4712 generic.go:334] "Generic (PLEG): container finished" podID="a7c2b70c-170d-425a-8281-2ff0b950ff29" containerID="54a85b5389677cbc3d6e72a87541eea68cbe2103a3faec656ea42881bbcfa7a3" exitCode=0 Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.762348 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a7c2b70c-170d-425a-8281-2ff0b950ff29","Type":"ContainerDied","Data":"54a85b5389677cbc3d6e72a87541eea68cbe2103a3faec656ea42881bbcfa7a3"} Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.772452 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" event={"ID":"7dad1b83-c7ab-4bc8-862b-78668f39bf8d","Type":"ContainerDied","Data":"fe61bc8d1985978dc6120cc94810b8fa4db6b17c3008e64e6b03ba83ad1c24a6"} Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.772515 4712 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="fe61bc8d1985978dc6120cc94810b8fa4db6b17c3008e64e6b03ba83ad1c24a6" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.772630 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.778232 4712 generic.go:334] "Generic (PLEG): container finished" podID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerID="eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f" exitCode=0 Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.778297 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqrgc" event={"ID":"2fdbdcd7-a42e-418f-aa9e-81599576fac5","Type":"ContainerDied","Data":"eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f"} Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.778330 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqrgc" event={"ID":"2fdbdcd7-a42e-418f-aa9e-81599576fac5","Type":"ContainerStarted","Data":"6c605b958008fe759397d9ee623d0e069985cb8b41c976eb78346ecc4456e985"} Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.899736 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:34 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld Jan 31 05:41:34 crc kubenswrapper[4712]: [+]process-running ok Jan 31 05:41:34 crc kubenswrapper[4712]: healthz check failed Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.900243 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:34 crc kubenswrapper[4712]: I0131 05:41:34.997408 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtnz9"] Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.007062 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-djd7s"] Jan 31 05:41:35 crc kubenswrapper[4712]: E0131 05:41:35.007319 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dad1b83-c7ab-4bc8-862b-78668f39bf8d" containerName="collect-profiles" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.007543 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dad1b83-c7ab-4bc8-862b-78668f39bf8d" containerName="collect-profiles" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.007710 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dad1b83-c7ab-4bc8-862b-78668f39bf8d" containerName="collect-profiles" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.008474 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.010678 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.030579 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-djd7s"] Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.112739 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz56g\" (UniqueName: \"kubernetes.io/projected/d110bc72-b196-442c-9ea7-f1460a0b2bf4-kube-api-access-bz56g\") pod \"redhat-operators-djd7s\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.112809 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-catalog-content\") pod \"redhat-operators-djd7s\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.112846 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-utilities\") pod \"redhat-operators-djd7s\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.214083 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz56g\" (UniqueName: \"kubernetes.io/projected/d110bc72-b196-442c-9ea7-f1460a0b2bf4-kube-api-access-bz56g\") pod \"redhat-operators-djd7s\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.214153 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-catalog-content\") pod \"redhat-operators-djd7s\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.214211 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-utilities\") pod \"redhat-operators-djd7s\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.214894 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-catalog-content\") pod \"redhat-operators-djd7s\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.218660 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-utilities\") pod \"redhat-operators-djd7s\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " 
pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.252704 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz56g\" (UniqueName: \"kubernetes.io/projected/d110bc72-b196-442c-9ea7-f1460a0b2bf4-kube-api-access-bz56g\") pod \"redhat-operators-djd7s\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.334489 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.420396 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8z9q4"] Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.421424 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.453620 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8z9q4"] Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.522357 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-utilities\") pod \"redhat-operators-8z9q4\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.522426 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcfkb\" (UniqueName: \"kubernetes.io/projected/2b20baa4-578c-4f78-ba6b-27b05d32ab85-kube-api-access-qcfkb\") pod \"redhat-operators-8z9q4\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.522450 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-catalog-content\") pod \"redhat-operators-8z9q4\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.625848 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcfkb\" (UniqueName: \"kubernetes.io/projected/2b20baa4-578c-4f78-ba6b-27b05d32ab85-kube-api-access-qcfkb\") pod \"redhat-operators-8z9q4\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.625890 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-catalog-content\") pod \"redhat-operators-8z9q4\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.625965 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-utilities\") pod \"redhat-operators-8z9q4\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " 
pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.626531 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-utilities\") pod \"redhat-operators-8z9q4\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.626990 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-catalog-content\") pod \"redhat-operators-8z9q4\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.678405 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcfkb\" (UniqueName: \"kubernetes.io/projected/2b20baa4-578c-4f78-ba6b-27b05d32ab85-kube-api-access-qcfkb\") pod \"redhat-operators-8z9q4\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.805416 4712 generic.go:334] "Generic (PLEG): container finished" podID="86d1e607-e026-4540-a6b0-fab85244efd3" containerID="f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4" exitCode=0 Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.805527 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpm9t" event={"ID":"86d1e607-e026-4540-a6b0-fab85244efd3","Type":"ContainerDied","Data":"f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4"} Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.805623 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpm9t" event={"ID":"86d1e607-e026-4540-a6b0-fab85244efd3","Type":"ContainerStarted","Data":"dc854ee3e6dca8de7c2379e7af447e0e2d78bb671cab300c4e15e841063ce0af"} Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.809561 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtnz9" event={"ID":"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa","Type":"ContainerStarted","Data":"91e8d3820db669feee54aef758cfbbe8d47c570efefb968a1c95fd136e463826"} Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.814206 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.878696 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-djd7s"] Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.898004 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:35 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld Jan 31 05:41:35 crc kubenswrapper[4712]: [+]process-running ok Jan 31 05:41:35 crc kubenswrapper[4712]: healthz check failed Jan 31 05:41:35 crc kubenswrapper[4712]: I0131 05:41:35.898090 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:35 crc kubenswrapper[4712]: W0131 05:41:35.899725 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd110bc72_b196_442c_9ea7_f1460a0b2bf4.slice/crio-b03cb638853368c53d1b2ee77eeeb100717a65a4207e7dc35192f9e5285c2397 WatchSource:0}: Error finding container b03cb638853368c53d1b2ee77eeeb100717a65a4207e7dc35192f9e5285c2397: Status 404 returned error can't find the container with id b03cb638853368c53d1b2ee77eeeb100717a65a4207e7dc35192f9e5285c2397 Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.071577 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.134475 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8z9q4"] Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.140019 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7c2b70c-170d-425a-8281-2ff0b950ff29-kubelet-dir\") pod \"a7c2b70c-170d-425a-8281-2ff0b950ff29\" (UID: \"a7c2b70c-170d-425a-8281-2ff0b950ff29\") " Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.140150 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7c2b70c-170d-425a-8281-2ff0b950ff29-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a7c2b70c-170d-425a-8281-2ff0b950ff29" (UID: "a7c2b70c-170d-425a-8281-2ff0b950ff29"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.141915 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7c2b70c-170d-425a-8281-2ff0b950ff29-kube-api-access\") pod \"a7c2b70c-170d-425a-8281-2ff0b950ff29\" (UID: \"a7c2b70c-170d-425a-8281-2ff0b950ff29\") " Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.144306 4712 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a7c2b70c-170d-425a-8281-2ff0b950ff29-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.150236 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7c2b70c-170d-425a-8281-2ff0b950ff29-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a7c2b70c-170d-425a-8281-2ff0b950ff29" (UID: "a7c2b70c-170d-425a-8281-2ff0b950ff29"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:41:36 crc kubenswrapper[4712]: W0131 05:41:36.150932 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b20baa4_578c_4f78_ba6b_27b05d32ab85.slice/crio-9561a4ce9786c939310b14156c993ba1af314dc138aaa547510ead9a435c2efb WatchSource:0}: Error finding container 9561a4ce9786c939310b14156c993ba1af314dc138aaa547510ead9a435c2efb: Status 404 returned error can't find the container with id 9561a4ce9786c939310b14156c993ba1af314dc138aaa547510ead9a435c2efb Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.245500 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7c2b70c-170d-425a-8281-2ff0b950ff29-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.820960 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8z9q4" event={"ID":"2b20baa4-578c-4f78-ba6b-27b05d32ab85","Type":"ContainerStarted","Data":"9561a4ce9786c939310b14156c993ba1af314dc138aaa547510ead9a435c2efb"} Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.824455 4712 generic.go:334] "Generic (PLEG): container finished" podID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerID="4ca0c14647f2268ded7843aa7a95a6d0e7f044b893dbcaf86983fcaf3d934833" exitCode=0 Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.824515 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtnz9" event={"ID":"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa","Type":"ContainerDied","Data":"4ca0c14647f2268ded7843aa7a95a6d0e7f044b893dbcaf86983fcaf3d934833"} Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.831769 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a7c2b70c-170d-425a-8281-2ff0b950ff29","Type":"ContainerDied","Data":"3f0f133154f16d9d7e092328014db0975323c71828a198fd27b5429b083f87c6"} Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.831813 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f0f133154f16d9d7e092328014db0975323c71828a198fd27b5429b083f87c6" Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.831904 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.841739 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djd7s" event={"ID":"d110bc72-b196-442c-9ea7-f1460a0b2bf4","Type":"ContainerStarted","Data":"b03cb638853368c53d1b2ee77eeeb100717a65a4207e7dc35192f9e5285c2397"} Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.900054 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:36 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld Jan 31 05:41:36 crc kubenswrapper[4712]: [+]process-running ok Jan 31 05:41:36 crc kubenswrapper[4712]: healthz check failed Jan 31 05:41:36 crc kubenswrapper[4712]: I0131 05:41:36.900489 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:37 crc kubenswrapper[4712]: I0131 05:41:37.853378 4712 generic.go:334] "Generic (PLEG): container finished" podID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerID="e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082" exitCode=0 Jan 31 05:41:37 crc kubenswrapper[4712]: I0131 05:41:37.853452 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djd7s" event={"ID":"d110bc72-b196-442c-9ea7-f1460a0b2bf4","Type":"ContainerDied","Data":"e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082"} Jan 31 05:41:37 crc kubenswrapper[4712]: I0131 05:41:37.879322 4712 generic.go:334] "Generic (PLEG): container finished" podID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerID="5b72f995449e9417ba09eb00914464f22ba85a18f5ebf3eaa3892235d9e9945a" exitCode=0 Jan 31 05:41:37 crc kubenswrapper[4712]: I0131 05:41:37.879828 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8z9q4" event={"ID":"2b20baa4-578c-4f78-ba6b-27b05d32ab85","Type":"ContainerDied","Data":"5b72f995449e9417ba09eb00914464f22ba85a18f5ebf3eaa3892235d9e9945a"} Jan 31 05:41:37 crc kubenswrapper[4712]: I0131 05:41:37.894932 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:37 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld Jan 31 05:41:37 crc kubenswrapper[4712]: [+]process-running ok Jan 31 05:41:37 crc kubenswrapper[4712]: healthz check failed Jan 31 05:41:37 crc kubenswrapper[4712]: I0131 05:41:37.895017 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 05:41:38 crc kubenswrapper[4712]: I0131 05:41:38.895745 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 31 05:41:38 crc 
Jan 31 05:41:38 crc kubenswrapper[4712]: I0131 05:41:38.895745 4712 patch_prober.go:28] interesting pod/router-default-5444994796-rhq9p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 31 05:41:38 crc kubenswrapper[4712]: [-]has-synced failed: reason withheld
Jan 31 05:41:38 crc kubenswrapper[4712]: [+]process-running ok
Jan 31 05:41:38 crc kubenswrapper[4712]: healthz check failed
Jan 31 05:41:38 crc kubenswrapper[4712]: I0131 05:41:38.895831 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rhq9p" podUID="e6a5dfee-6def-4a85-9e8b-854f91517c58" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 31 05:41:38 crc kubenswrapper[4712]: I0131 05:41:38.952357 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-xgchj"
Jan 31 05:41:39 crc kubenswrapper[4712]: I0131 05:41:39.894863 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-rhq9p"
Jan 31 05:41:39 crc kubenswrapper[4712]: I0131 05:41:39.897698 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-rhq9p"
Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.353981 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn"
Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.539094 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Jan 31 05:41:40 crc kubenswrapper[4712]: E0131 05:41:40.544290 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7c2b70c-170d-425a-8281-2ff0b950ff29" containerName="pruner"
Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.544321 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7c2b70c-170d-425a-8281-2ff0b950ff29" containerName="pruner"
Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.544468 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7c2b70c-170d-425a-8281-2ff0b950ff29" containerName="pruner"
Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.544961 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.546379 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.548419 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.551301 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.555288 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.574893 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03cf41cd-8606-4e98-a290-023fbe7d0956-metrics-certs\") pod \"network-metrics-daemon-5svzb\" (UID: \"03cf41cd-8606-4e98-a290-023fbe7d0956\") " pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.647838 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/53026338-09b1-4e67-bc82-ab70f6737607-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"53026338-09b1-4e67-bc82-ab70f6737607\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.647941 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/53026338-09b1-4e67-bc82-ab70f6737607-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"53026338-09b1-4e67-bc82-ab70f6737607\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.754037 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/53026338-09b1-4e67-bc82-ab70f6737607-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"53026338-09b1-4e67-bc82-ab70f6737607\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.754119 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/53026338-09b1-4e67-bc82-ab70f6737607-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"53026338-09b1-4e67-bc82-ab70f6737607\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.754246 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/53026338-09b1-4e67-bc82-ab70f6737607-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"53026338-09b1-4e67-bc82-ab70f6737607\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.787420 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/53026338-09b1-4e67-bc82-ab70f6737607-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"53026338-09b1-4e67-bc82-ab70f6737607\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.868628 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5svzb" Jan 31 05:41:40 crc kubenswrapper[4712]: I0131 05:41:40.908936 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:41:42 crc kubenswrapper[4712]: I0131 05:41:42.499823 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:41:42 crc kubenswrapper[4712]: I0131 05:41:42.499920 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:41:42 crc kubenswrapper[4712]: I0131 05:41:42.739588 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:42 crc kubenswrapper[4712]: I0131 05:41:42.748465 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:41:42 crc kubenswrapper[4712]: I0131 05:41:42.837327 4712 patch_prober.go:28] interesting pod/downloads-7954f5f757-s5vdj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Jan 31 05:41:42 crc kubenswrapper[4712]: I0131 05:41:42.837633 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s5vdj" podUID="930eb788-1c1a-41e5-8989-7fbbf25c5da0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Jan 31 05:41:42 crc kubenswrapper[4712]: I0131 05:41:42.837476 4712 patch_prober.go:28] interesting pod/downloads-7954f5f757-s5vdj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Jan 31 05:41:42 crc kubenswrapper[4712]: I0131 05:41:42.837984 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-s5vdj" podUID="930eb788-1c1a-41e5-8989-7fbbf25c5da0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Jan 31 05:41:52 crc kubenswrapper[4712]: I0131 05:41:52.858084 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-s5vdj" Jan 31 05:41:53 crc kubenswrapper[4712]: I0131 05:41:53.005546 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:42:03 crc kubenswrapper[4712]: I0131 
05:42:03.559589 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6lmn7" Jan 31 05:42:04 crc kubenswrapper[4712]: I0131 05:42:04.970749 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 31 05:42:07 crc kubenswrapper[4712]: E0131 05:42:07.670852 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 31 05:42:07 crc kubenswrapper[4712]: E0131 05:42:07.671037 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dbb49,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-njk9h_openshift-marketplace(1fd9f25f-4108-411d-a106-3d94f4afe2bf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 31 05:42:07 crc kubenswrapper[4712]: E0131 05:42:07.672259 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-njk9h" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.337073 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.338237 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.355408 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.422569 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.422665 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.497054 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.497124 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.524100 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.524183 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.524252 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.571145 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:12 crc kubenswrapper[4712]: I0131 05:42:12.665153 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:13 crc kubenswrapper[4712]: E0131 05:42:13.974267 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-njk9h" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" Jan 31 05:42:14 crc kubenswrapper[4712]: E0131 05:42:14.232583 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 31 05:42:14 crc kubenswrapper[4712]: E0131 05:42:14.233014 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bz56g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-djd7s_openshift-marketplace(d110bc72-b196-442c-9ea7-f1460a0b2bf4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 31 05:42:14 crc kubenswrapper[4712]: E0131 05:42:14.234233 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-djd7s" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" Jan 31 05:42:14 crc kubenswrapper[4712]: E0131 05:42:14.253998 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 31 05:42:14 crc kubenswrapper[4712]: E0131 05:42:14.254194 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-55xg9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-vqrgc_openshift-marketplace(2fdbdcd7-a42e-418f-aa9e-81599576fac5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 31 05:42:14 crc kubenswrapper[4712]: E0131 05:42:14.255369 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-vqrgc" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.541451 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.542923 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.555632 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.585542 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-kubelet-dir\") pod \"installer-9-crc\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.586055 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-var-lock\") pod \"installer-9-crc\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.586100 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52a81c91-0d8f-4691-a5b2-106bf4378643-kube-api-access\") pod \"installer-9-crc\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.687677 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-var-lock\") pod \"installer-9-crc\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.687745 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52a81c91-0d8f-4691-a5b2-106bf4378643-kube-api-access\") pod \"installer-9-crc\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.687785 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-kubelet-dir\") pod \"installer-9-crc\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.687817 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-var-lock\") pod \"installer-9-crc\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.687897 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-kubelet-dir\") pod \"installer-9-crc\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.714009 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52a81c91-0d8f-4691-a5b2-106bf4378643-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"52a81c91-0d8f-4691-a5b2-106bf4378643\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:16 crc kubenswrapper[4712]: I0131 05:42:16.867819 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:42:18 crc kubenswrapper[4712]: E0131 05:42:18.027101 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-djd7s" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" Jan 31 05:42:18 crc kubenswrapper[4712]: E0131 05:42:18.027071 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-vqrgc" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" Jan 31 05:42:19 crc kubenswrapper[4712]: E0131 05:42:19.801489 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 31 05:42:19 crc kubenswrapper[4712]: E0131 05:42:19.802315 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dkl9v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-cpm9t_openshift-marketplace(86d1e607-e026-4540-a6b0-fab85244efd3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 31 05:42:19 crc kubenswrapper[4712]: E0131 05:42:19.803591 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying 
config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-cpm9t" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" Jan 31 05:42:19 crc kubenswrapper[4712]: E0131 05:42:19.952439 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 31 05:42:19 crc kubenswrapper[4712]: E0131 05:42:19.952681 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qcfkb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-8z9q4_openshift-marketplace(2b20baa4-578c-4f78-ba6b-27b05d32ab85): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 31 05:42:19 crc kubenswrapper[4712]: E0131 05:42:19.954547 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-8z9q4" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" Jan 31 05:42:21 crc kubenswrapper[4712]: E0131 05:42:21.326017 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 31 05:42:21 crc kubenswrapper[4712]: E0131 05:42:21.326410 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wz2cb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-qtnz9_openshift-marketplace(e3a3eb30-cb5f-473d-86dc-da8ddb6275fa): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 31 05:42:21 crc kubenswrapper[4712]: E0131 05:42:21.327905 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-qtnz9" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" Jan 31 05:42:22 crc kubenswrapper[4712]: E0131 05:42:22.544903 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-qtnz9" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" Jan 31 05:42:22 crc kubenswrapper[4712]: E0131 05:42:22.544926 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-8z9q4" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" Jan 31 05:42:22 crc kubenswrapper[4712]: E0131 05:42:22.631876 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 31 05:42:22 crc kubenswrapper[4712]: E0131 05:42:22.632253 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ncfzm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-4mdhz_openshift-marketplace(2c321fbf-244e-403c-a1c0-18b136eb9995): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 31 05:42:22 crc kubenswrapper[4712]: E0131 05:42:22.634351 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-4mdhz" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" Jan 31 05:42:22 crc kubenswrapper[4712]: E0131 05:42:22.644854 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 31 05:42:22 crc kubenswrapper[4712]: E0131 05:42:22.645019 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qg7z7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-dpr8n_openshift-marketplace(b4b6ff77-738a-480a-b29c-30a4a0d42182): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 31 05:42:22 crc kubenswrapper[4712]: E0131 05:42:22.646196 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-dpr8n" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" Jan 31 05:42:22 crc kubenswrapper[4712]: I0131 05:42:22.951235 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 31 05:42:23 crc kubenswrapper[4712]: I0131 05:42:23.057226 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-5svzb"] Jan 31 05:42:23 crc kubenswrapper[4712]: W0131 05:42:23.068313 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03cf41cd_8606_4e98_a290_023fbe7d0956.slice/crio-eb072dc686952d6fcdce475388e591a33e14e681c73ec8e43dc33c24d480e4ed WatchSource:0}: Error finding container eb072dc686952d6fcdce475388e591a33e14e681c73ec8e43dc33c24d480e4ed: Status 404 returned error can't find the container with id eb072dc686952d6fcdce475388e591a33e14e681c73ec8e43dc33c24d480e4ed Jan 31 05:42:23 crc kubenswrapper[4712]: I0131 05:42:23.128334 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 31 05:42:23 crc kubenswrapper[4712]: I0131 05:42:23.134612 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 31 05:42:23 crc kubenswrapper[4712]: I0131 05:42:23.319080 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"53026338-09b1-4e67-bc82-ab70f6737607","Type":"ContainerStarted","Data":"a465670a1d8708ceab759a1903da0a35abcce4a98950c81881ce0b4b092fa591"} Jan 31 05:42:23 crc 
kubenswrapper[4712]: I0131 05:42:23.322643 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5svzb" event={"ID":"03cf41cd-8606-4e98-a290-023fbe7d0956","Type":"ContainerStarted","Data":"eb072dc686952d6fcdce475388e591a33e14e681c73ec8e43dc33c24d480e4ed"} Jan 31 05:42:23 crc kubenswrapper[4712]: I0131 05:42:23.326325 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"52a81c91-0d8f-4691-a5b2-106bf4378643","Type":"ContainerStarted","Data":"94306dfd30058644554903ff7edc36402242d5d9948d905bd0fb38c7dae924bd"} Jan 31 05:42:23 crc kubenswrapper[4712]: I0131 05:42:23.330632 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2b70821b-0bd2-414f-ab4b-80dc8f5883a3","Type":"ContainerStarted","Data":"3194be346f40b7baf5e414d58f8912011a49e69acb03f3628ba3d8f28a6be36c"} Jan 31 05:42:23 crc kubenswrapper[4712]: E0131 05:42:23.331686 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-dpr8n" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" Jan 31 05:42:23 crc kubenswrapper[4712]: E0131 05:42:23.332418 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-4mdhz" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" Jan 31 05:42:23 crc kubenswrapper[4712]: I0131 05:42:23.348631 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=7.348616038 podStartE2EDuration="7.348616038s" podCreationTimestamp="2026-01-31 05:42:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:42:23.347055448 +0000 UTC m=+209.440937289" watchObservedRunningTime="2026-01-31 05:42:23.348616038 +0000 UTC m=+209.442497879" Jan 31 05:42:24 crc kubenswrapper[4712]: I0131 05:42:24.339102 4712 generic.go:334] "Generic (PLEG): container finished" podID="2b70821b-0bd2-414f-ab4b-80dc8f5883a3" containerID="5682219d26056315dfe299f3a20c0eee34fc22cb6e2c49f3ee0346d3188de5bf" exitCode=0 Jan 31 05:42:24 crc kubenswrapper[4712]: I0131 05:42:24.339352 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2b70821b-0bd2-414f-ab4b-80dc8f5883a3","Type":"ContainerDied","Data":"5682219d26056315dfe299f3a20c0eee34fc22cb6e2c49f3ee0346d3188de5bf"} Jan 31 05:42:24 crc kubenswrapper[4712]: I0131 05:42:24.343141 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5svzb" event={"ID":"03cf41cd-8606-4e98-a290-023fbe7d0956","Type":"ContainerStarted","Data":"3be71a8e7bc1ae5b2430cfc1a5b0d875d2f1b9784a99916b19d444ca86c362dc"} Jan 31 05:42:24 crc kubenswrapper[4712]: I0131 05:42:24.343296 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5svzb" event={"ID":"03cf41cd-8606-4e98-a290-023fbe7d0956","Type":"ContainerStarted","Data":"898ff032f75178959aad5fcab926fa511213331eb1696cb7147d5517988b1d25"} Jan 31 05:42:24 crc 
kubenswrapper[4712]: I0131 05:42:24.344931 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"52a81c91-0d8f-4691-a5b2-106bf4378643","Type":"ContainerStarted","Data":"0436d045912697bc84d49fef8f18eccd9afc352473b146d14d54715e725c6f08"} Jan 31 05:42:24 crc kubenswrapper[4712]: I0131 05:42:24.346752 4712 generic.go:334] "Generic (PLEG): container finished" podID="53026338-09b1-4e67-bc82-ab70f6737607" containerID="b9f89860d18a18124211411e9a1104fc0725717c2ce864799179b5aee8b40957" exitCode=0 Jan 31 05:42:24 crc kubenswrapper[4712]: I0131 05:42:24.346797 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"53026338-09b1-4e67-bc82-ab70f6737607","Type":"ContainerDied","Data":"b9f89860d18a18124211411e9a1104fc0725717c2ce864799179b5aee8b40957"} Jan 31 05:42:24 crc kubenswrapper[4712]: I0131 05:42:24.382350 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-5svzb" podStartSLOduration=186.382334025 podStartE2EDuration="3m6.382334025s" podCreationTimestamp="2026-01-31 05:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:42:24.379145213 +0000 UTC m=+210.473027054" watchObservedRunningTime="2026-01-31 05:42:24.382334025 +0000 UTC m=+210.476215866" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.602671 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.607730 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.728358 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/53026338-09b1-4e67-bc82-ab70f6737607-kube-api-access\") pod \"53026338-09b1-4e67-bc82-ab70f6737607\" (UID: \"53026338-09b1-4e67-bc82-ab70f6737607\") " Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.728490 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kube-api-access\") pod \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\" (UID: \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\") " Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.728552 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/53026338-09b1-4e67-bc82-ab70f6737607-kubelet-dir\") pod \"53026338-09b1-4e67-bc82-ab70f6737607\" (UID: \"53026338-09b1-4e67-bc82-ab70f6737607\") " Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.728567 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kubelet-dir\") pod \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\" (UID: \"2b70821b-0bd2-414f-ab4b-80dc8f5883a3\") " Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.728713 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/53026338-09b1-4e67-bc82-ab70f6737607-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "53026338-09b1-4e67-bc82-ab70f6737607" 
(UID: "53026338-09b1-4e67-bc82-ab70f6737607"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.728788 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2b70821b-0bd2-414f-ab4b-80dc8f5883a3" (UID: "2b70821b-0bd2-414f-ab4b-80dc8f5883a3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.728850 4712 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/53026338-09b1-4e67-bc82-ab70f6737607-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.728861 4712 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.735893 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2b70821b-0bd2-414f-ab4b-80dc8f5883a3" (UID: "2b70821b-0bd2-414f-ab4b-80dc8f5883a3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.736306 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53026338-09b1-4e67-bc82-ab70f6737607-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "53026338-09b1-4e67-bc82-ab70f6737607" (UID: "53026338-09b1-4e67-bc82-ab70f6737607"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.830345 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b70821b-0bd2-414f-ab4b-80dc8f5883a3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:25 crc kubenswrapper[4712]: I0131 05:42:25.830390 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/53026338-09b1-4e67-bc82-ab70f6737607-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:26 crc kubenswrapper[4712]: I0131 05:42:26.360188 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2b70821b-0bd2-414f-ab4b-80dc8f5883a3","Type":"ContainerDied","Data":"3194be346f40b7baf5e414d58f8912011a49e69acb03f3628ba3d8f28a6be36c"} Jan 31 05:42:26 crc kubenswrapper[4712]: I0131 05:42:26.360697 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3194be346f40b7baf5e414d58f8912011a49e69acb03f3628ba3d8f28a6be36c" Jan 31 05:42:26 crc kubenswrapper[4712]: I0131 05:42:26.360357 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 31 05:42:26 crc kubenswrapper[4712]: I0131 05:42:26.363820 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"53026338-09b1-4e67-bc82-ab70f6737607","Type":"ContainerDied","Data":"a465670a1d8708ceab759a1903da0a35abcce4a98950c81881ce0b4b092fa591"} Jan 31 05:42:26 crc kubenswrapper[4712]: I0131 05:42:26.363875 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a465670a1d8708ceab759a1903da0a35abcce4a98950c81881ce0b4b092fa591" Jan 31 05:42:26 crc kubenswrapper[4712]: I0131 05:42:26.363952 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 31 05:42:29 crc kubenswrapper[4712]: I0131 05:42:29.386294 4712 generic.go:334] "Generic (PLEG): container finished" podID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerID="d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2" exitCode=0 Jan 31 05:42:29 crc kubenswrapper[4712]: I0131 05:42:29.386409 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-njk9h" event={"ID":"1fd9f25f-4108-411d-a106-3d94f4afe2bf","Type":"ContainerDied","Data":"d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2"} Jan 31 05:42:30 crc kubenswrapper[4712]: I0131 05:42:30.397617 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-njk9h" event={"ID":"1fd9f25f-4108-411d-a106-3d94f4afe2bf","Type":"ContainerStarted","Data":"19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff"} Jan 31 05:42:30 crc kubenswrapper[4712]: I0131 05:42:30.416921 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-njk9h" podStartSLOduration=4.267573776 podStartE2EDuration="59.416903172s" podCreationTimestamp="2026-01-31 05:41:31 +0000 UTC" firstStartedPulling="2026-01-31 05:41:34.711048154 +0000 UTC m=+160.804929995" lastFinishedPulling="2026-01-31 05:42:29.86037755 +0000 UTC m=+215.954259391" observedRunningTime="2026-01-31 05:42:30.41564102 +0000 UTC m=+216.509522861" watchObservedRunningTime="2026-01-31 05:42:30.416903172 +0000 UTC m=+216.510785013" Jan 31 05:42:32 crc kubenswrapper[4712]: I0131 05:42:32.438820 4712 generic.go:334] "Generic (PLEG): container finished" podID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerID="993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167" exitCode=0 Jan 31 05:42:32 crc kubenswrapper[4712]: I0131 05:42:32.438947 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqrgc" event={"ID":"2fdbdcd7-a42e-418f-aa9e-81599576fac5","Type":"ContainerDied","Data":"993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167"} Jan 31 05:42:33 crc kubenswrapper[4712]: I0131 05:42:33.308694 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-njk9h" Jan 31 05:42:33 crc kubenswrapper[4712]: I0131 05:42:33.309253 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-njk9h" Jan 31 05:42:33 crc kubenswrapper[4712]: I0131 05:42:33.482282 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-njk9h" Jan 31 05:42:34 crc kubenswrapper[4712]: I0131 05:42:34.454235 
4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqrgc" event={"ID":"2fdbdcd7-a42e-418f-aa9e-81599576fac5","Type":"ContainerStarted","Data":"b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63"} Jan 31 05:42:34 crc kubenswrapper[4712]: I0131 05:42:34.488938 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vqrgc" podStartSLOduration=3.863878241 podStartE2EDuration="1m2.488911422s" podCreationTimestamp="2026-01-31 05:41:32 +0000 UTC" firstStartedPulling="2026-01-31 05:41:34.780862488 +0000 UTC m=+160.874744329" lastFinishedPulling="2026-01-31 05:42:33.405895669 +0000 UTC m=+219.499777510" observedRunningTime="2026-01-31 05:42:34.486062449 +0000 UTC m=+220.579944300" watchObservedRunningTime="2026-01-31 05:42:34.488911422 +0000 UTC m=+220.582793273" Jan 31 05:42:36 crc kubenswrapper[4712]: I0131 05:42:36.472649 4712 generic.go:334] "Generic (PLEG): container finished" podID="86d1e607-e026-4540-a6b0-fab85244efd3" containerID="e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1" exitCode=0 Jan 31 05:42:36 crc kubenswrapper[4712]: I0131 05:42:36.472743 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpm9t" event={"ID":"86d1e607-e026-4540-a6b0-fab85244efd3","Type":"ContainerDied","Data":"e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1"} Jan 31 05:42:36 crc kubenswrapper[4712]: I0131 05:42:36.479464 4712 generic.go:334] "Generic (PLEG): container finished" podID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerID="eaff49fa8ab66379509afb9a74fafcf37ef62edf6d92469b106970e34b72276d" exitCode=0 Jan 31 05:42:36 crc kubenswrapper[4712]: I0131 05:42:36.479584 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtnz9" event={"ID":"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa","Type":"ContainerDied","Data":"eaff49fa8ab66379509afb9a74fafcf37ef62edf6d92469b106970e34b72276d"} Jan 31 05:42:36 crc kubenswrapper[4712]: I0131 05:42:36.485519 4712 generic.go:334] "Generic (PLEG): container finished" podID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerID="bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88" exitCode=0 Jan 31 05:42:36 crc kubenswrapper[4712]: I0131 05:42:36.485582 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djd7s" event={"ID":"d110bc72-b196-442c-9ea7-f1460a0b2bf4","Type":"ContainerDied","Data":"bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88"} Jan 31 05:42:37 crc kubenswrapper[4712]: I0131 05:42:37.494470 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djd7s" event={"ID":"d110bc72-b196-442c-9ea7-f1460a0b2bf4","Type":"ContainerStarted","Data":"e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb"} Jan 31 05:42:37 crc kubenswrapper[4712]: I0131 05:42:37.499556 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpm9t" event={"ID":"86d1e607-e026-4540-a6b0-fab85244efd3","Type":"ContainerStarted","Data":"5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208"} Jan 31 05:42:37 crc kubenswrapper[4712]: I0131 05:42:37.501975 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8z9q4" 
event={"ID":"2b20baa4-578c-4f78-ba6b-27b05d32ab85","Type":"ContainerStarted","Data":"4c5e51808d0b342fe6f6cdb6e578af9b5bfaba273e60f105634e6e591ad15c0e"} Jan 31 05:42:37 crc kubenswrapper[4712]: I0131 05:42:37.505151 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtnz9" event={"ID":"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa","Type":"ContainerStarted","Data":"b7cde776c0a52263d83ab75b207066a9a8311910c011efcb256f187064a2096f"} Jan 31 05:42:37 crc kubenswrapper[4712]: I0131 05:42:37.509626 4712 generic.go:334] "Generic (PLEG): container finished" podID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerID="315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97" exitCode=0 Jan 31 05:42:37 crc kubenswrapper[4712]: I0131 05:42:37.509665 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mdhz" event={"ID":"2c321fbf-244e-403c-a1c0-18b136eb9995","Type":"ContainerDied","Data":"315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97"} Jan 31 05:42:37 crc kubenswrapper[4712]: I0131 05:42:37.520229 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-djd7s" podStartSLOduration=4.464772314 podStartE2EDuration="1m3.520202206s" podCreationTimestamp="2026-01-31 05:41:34 +0000 UTC" firstStartedPulling="2026-01-31 05:41:37.868261434 +0000 UTC m=+163.962143285" lastFinishedPulling="2026-01-31 05:42:36.923691336 +0000 UTC m=+223.017573177" observedRunningTime="2026-01-31 05:42:37.518740668 +0000 UTC m=+223.612622509" watchObservedRunningTime="2026-01-31 05:42:37.520202206 +0000 UTC m=+223.614084047" Jan 31 05:42:37 crc kubenswrapper[4712]: I0131 05:42:37.581569 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qtnz9" podStartSLOduration=3.542314137 podStartE2EDuration="1m3.581549883s" podCreationTimestamp="2026-01-31 05:41:34 +0000 UTC" firstStartedPulling="2026-01-31 05:41:36.826296036 +0000 UTC m=+162.920177877" lastFinishedPulling="2026-01-31 05:42:36.865531782 +0000 UTC m=+222.959413623" observedRunningTime="2026-01-31 05:42:37.577794797 +0000 UTC m=+223.671676638" watchObservedRunningTime="2026-01-31 05:42:37.581549883 +0000 UTC m=+223.675431724" Jan 31 05:42:37 crc kubenswrapper[4712]: I0131 05:42:37.634313 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cpm9t" podStartSLOduration=4.543943339 podStartE2EDuration="1m4.634290478s" podCreationTimestamp="2026-01-31 05:41:33 +0000 UTC" firstStartedPulling="2026-01-31 05:41:36.849287967 +0000 UTC m=+162.943169808" lastFinishedPulling="2026-01-31 05:42:36.939635106 +0000 UTC m=+223.033516947" observedRunningTime="2026-01-31 05:42:37.621476929 +0000 UTC m=+223.715358790" watchObservedRunningTime="2026-01-31 05:42:37.634290478 +0000 UTC m=+223.728172319" Jan 31 05:42:38 crc kubenswrapper[4712]: I0131 05:42:38.521196 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mdhz" event={"ID":"2c321fbf-244e-403c-a1c0-18b136eb9995","Type":"ContainerStarted","Data":"f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a"} Jan 31 05:42:38 crc kubenswrapper[4712]: I0131 05:42:38.526308 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpr8n" 
event={"ID":"b4b6ff77-738a-480a-b29c-30a4a0d42182","Type":"ContainerStarted","Data":"9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340"} Jan 31 05:42:38 crc kubenswrapper[4712]: I0131 05:42:38.528449 4712 generic.go:334] "Generic (PLEG): container finished" podID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerID="4c5e51808d0b342fe6f6cdb6e578af9b5bfaba273e60f105634e6e591ad15c0e" exitCode=0 Jan 31 05:42:38 crc kubenswrapper[4712]: I0131 05:42:38.528507 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8z9q4" event={"ID":"2b20baa4-578c-4f78-ba6b-27b05d32ab85","Type":"ContainerDied","Data":"4c5e51808d0b342fe6f6cdb6e578af9b5bfaba273e60f105634e6e591ad15c0e"} Jan 31 05:42:38 crc kubenswrapper[4712]: I0131 05:42:38.552684 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4mdhz" podStartSLOduration=2.198009539 podStartE2EDuration="1m6.5526605s" podCreationTimestamp="2026-01-31 05:41:32 +0000 UTC" firstStartedPulling="2026-01-31 05:41:33.624007718 +0000 UTC m=+159.717889559" lastFinishedPulling="2026-01-31 05:42:37.978658679 +0000 UTC m=+224.072540520" observedRunningTime="2026-01-31 05:42:38.550739711 +0000 UTC m=+224.644621562" watchObservedRunningTime="2026-01-31 05:42:38.5526605 +0000 UTC m=+224.646542341" Jan 31 05:42:39 crc kubenswrapper[4712]: I0131 05:42:39.536419 4712 generic.go:334] "Generic (PLEG): container finished" podID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerID="9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340" exitCode=0 Jan 31 05:42:39 crc kubenswrapper[4712]: I0131 05:42:39.536511 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpr8n" event={"ID":"b4b6ff77-738a-480a-b29c-30a4a0d42182","Type":"ContainerDied","Data":"9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340"} Jan 31 05:42:39 crc kubenswrapper[4712]: I0131 05:42:39.543303 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8z9q4" event={"ID":"2b20baa4-578c-4f78-ba6b-27b05d32ab85","Type":"ContainerStarted","Data":"38907b2a23191589b2c2f762e6ec3f961f33d1cd2e637c382adac3adb075f905"} Jan 31 05:42:39 crc kubenswrapper[4712]: I0131 05:42:39.592016 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8z9q4" podStartSLOduration=3.549166844 podStartE2EDuration="1m4.591989591s" podCreationTimestamp="2026-01-31 05:41:35 +0000 UTC" firstStartedPulling="2026-01-31 05:41:37.882602263 +0000 UTC m=+163.976484104" lastFinishedPulling="2026-01-31 05:42:38.92542501 +0000 UTC m=+225.019306851" observedRunningTime="2026-01-31 05:42:39.588022608 +0000 UTC m=+225.681904469" watchObservedRunningTime="2026-01-31 05:42:39.591989591 +0000 UTC m=+225.685871422" Jan 31 05:42:41 crc kubenswrapper[4712]: I0131 05:42:41.558563 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpr8n" event={"ID":"b4b6ff77-738a-480a-b29c-30a4a0d42182","Type":"ContainerStarted","Data":"47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca"} Jan 31 05:42:41 crc kubenswrapper[4712]: I0131 05:42:41.585104 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dpr8n" podStartSLOduration=3.642735189 podStartE2EDuration="1m10.585077733s" podCreationTimestamp="2026-01-31 05:41:31 +0000 UTC" firstStartedPulling="2026-01-31 
05:41:33.651385992 +0000 UTC m=+159.745267833" lastFinishedPulling="2026-01-31 05:42:40.593728536 +0000 UTC m=+226.687610377" observedRunningTime="2026-01-31 05:42:41.583132453 +0000 UTC m=+227.677014304" watchObservedRunningTime="2026-01-31 05:42:41.585077733 +0000 UTC m=+227.678959574" Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.132547 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zsktt"] Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.226797 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.226863 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.497807 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.497929 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.498007 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.498837 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.498976 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44" gracePeriod=600 Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.529999 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.530076 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:42:42 crc kubenswrapper[4712]: I0131 05:42:42.578036 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:42:43 crc kubenswrapper[4712]: I0131 05:42:43.279450 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-dpr8n" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerName="registry-server" probeResult="failure" output=< Jan 31 05:42:43 crc kubenswrapper[4712]: timeout: failed to connect 
service ":50051" within 1s Jan 31 05:42:43 crc kubenswrapper[4712]: > Jan 31 05:42:43 crc kubenswrapper[4712]: I0131 05:42:43.311760 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vqrgc" Jan 31 05:42:43 crc kubenswrapper[4712]: I0131 05:42:43.311809 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vqrgc" Jan 31 05:42:43 crc kubenswrapper[4712]: I0131 05:42:43.363012 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vqrgc" Jan 31 05:42:43 crc kubenswrapper[4712]: I0131 05:42:43.379938 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-njk9h" Jan 31 05:42:43 crc kubenswrapper[4712]: I0131 05:42:43.573743 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44" exitCode=0 Jan 31 05:42:43 crc kubenswrapper[4712]: I0131 05:42:43.573826 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44"} Jan 31 05:42:43 crc kubenswrapper[4712]: I0131 05:42:43.573874 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"665f0601f5bd5cd4080af883b309ab10a9cf5bd861b816ffdf4888ace3070ed5"} Jan 31 05:42:43 crc kubenswrapper[4712]: I0131 05:42:43.633008 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vqrgc" Jan 31 05:42:44 crc kubenswrapper[4712]: I0131 05:42:44.164048 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:42:44 crc kubenswrapper[4712]: I0131 05:42:44.164460 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:42:44 crc kubenswrapper[4712]: I0131 05:42:44.213747 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:42:44 crc kubenswrapper[4712]: I0131 05:42:44.551632 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:42:44 crc kubenswrapper[4712]: I0131 05:42:44.552523 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:42:44 crc kubenswrapper[4712]: I0131 05:42:44.603295 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:42:44 crc kubenswrapper[4712]: I0131 05:42:44.636851 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.335564 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.336159 4712 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.387582 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.632125 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.638407 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.815079 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.815135 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.830146 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vqrgc"] Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.830441 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vqrgc" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerName="registry-server" containerID="cri-o://b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63" gracePeriod=2 Jan 31 05:42:45 crc kubenswrapper[4712]: I0131 05:42:45.862512 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.224747 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vqrgc" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.348590 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-catalog-content\") pod \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.348652 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55xg9\" (UniqueName: \"kubernetes.io/projected/2fdbdcd7-a42e-418f-aa9e-81599576fac5-kube-api-access-55xg9\") pod \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.348745 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-utilities\") pod \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\" (UID: \"2fdbdcd7-a42e-418f-aa9e-81599576fac5\") " Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.349945 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-utilities" (OuterVolumeSpecName: "utilities") pod "2fdbdcd7-a42e-418f-aa9e-81599576fac5" (UID: "2fdbdcd7-a42e-418f-aa9e-81599576fac5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.362431 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fdbdcd7-a42e-418f-aa9e-81599576fac5-kube-api-access-55xg9" (OuterVolumeSpecName: "kube-api-access-55xg9") pod "2fdbdcd7-a42e-418f-aa9e-81599576fac5" (UID: "2fdbdcd7-a42e-418f-aa9e-81599576fac5"). InnerVolumeSpecName "kube-api-access-55xg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.401636 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2fdbdcd7-a42e-418f-aa9e-81599576fac5" (UID: "2fdbdcd7-a42e-418f-aa9e-81599576fac5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.450735 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.450799 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55xg9\" (UniqueName: \"kubernetes.io/projected/2fdbdcd7-a42e-418f-aa9e-81599576fac5-kube-api-access-55xg9\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.450817 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fdbdcd7-a42e-418f-aa9e-81599576fac5-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.598997 4712 generic.go:334] "Generic (PLEG): container finished" podID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerID="b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63" exitCode=0 Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.599100 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vqrgc" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.599076 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqrgc" event={"ID":"2fdbdcd7-a42e-418f-aa9e-81599576fac5","Type":"ContainerDied","Data":"b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63"} Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.599191 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqrgc" event={"ID":"2fdbdcd7-a42e-418f-aa9e-81599576fac5","Type":"ContainerDied","Data":"6c605b958008fe759397d9ee623d0e069985cb8b41c976eb78346ecc4456e985"} Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.599225 4712 scope.go:117] "RemoveContainer" containerID="b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.629389 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vqrgc"] Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.633356 4712 scope.go:117] "RemoveContainer" containerID="993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.634612 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vqrgc"] Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.661495 4712 scope.go:117] "RemoveContainer" containerID="eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.682120 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.684698 4712 scope.go:117] "RemoveContainer" containerID="b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63" Jan 31 05:42:46 crc kubenswrapper[4712]: E0131 05:42:46.685428 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63\": container with ID starting with b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63 not found: ID does not exist" containerID="b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.685512 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63"} err="failed to get container status \"b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63\": rpc error: code = NotFound desc = could not find container \"b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63\": container with ID starting with b82fcbf68e7d8ab4fe9bf79b64213ace6fdf722ccd44d0a02aa70ecffd194f63 not found: ID does not exist" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.685557 4712 scope.go:117] "RemoveContainer" containerID="993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167" Jan 31 05:42:46 crc kubenswrapper[4712]: E0131 05:42:46.686489 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167\": container with ID starting with 
993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167 not found: ID does not exist" containerID="993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.686537 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167"} err="failed to get container status \"993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167\": rpc error: code = NotFound desc = could not find container \"993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167\": container with ID starting with 993d5bbcfe03e384ce0159c5653bce34a3cd08ab260e74fcc2a7d82b16f94167 not found: ID does not exist" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.686574 4712 scope.go:117] "RemoveContainer" containerID="eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f" Jan 31 05:42:46 crc kubenswrapper[4712]: E0131 05:42:46.686885 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f\": container with ID starting with eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f not found: ID does not exist" containerID="eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f" Jan 31 05:42:46 crc kubenswrapper[4712]: I0131 05:42:46.686910 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f"} err="failed to get container status \"eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f\": rpc error: code = NotFound desc = could not find container \"eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f\": container with ID starting with eb6fa1df9adc941e1cd3e60e2746088432f5c2866f8584613d5edb3ad1c7559f not found: ID does not exist" Jan 31 05:42:47 crc kubenswrapper[4712]: I0131 05:42:47.637559 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtnz9"] Jan 31 05:42:47 crc kubenswrapper[4712]: I0131 05:42:47.638241 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qtnz9" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerName="registry-server" containerID="cri-o://b7cde776c0a52263d83ab75b207066a9a8311910c011efcb256f187064a2096f" gracePeriod=2 Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.232116 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8z9q4"] Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.513241 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" path="/var/lib/kubelet/pods/2fdbdcd7-a42e-418f-aa9e-81599576fac5/volumes" Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.617527 4712 generic.go:334] "Generic (PLEG): container finished" podID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerID="b7cde776c0a52263d83ab75b207066a9a8311910c011efcb256f187064a2096f" exitCode=0 Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.617803 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8z9q4" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerName="registry-server" 
containerID="cri-o://38907b2a23191589b2c2f762e6ec3f961f33d1cd2e637c382adac3adb075f905" gracePeriod=2 Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.618189 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtnz9" event={"ID":"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa","Type":"ContainerDied","Data":"b7cde776c0a52263d83ab75b207066a9a8311910c011efcb256f187064a2096f"} Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.843722 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.991130 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz2cb\" (UniqueName: \"kubernetes.io/projected/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-kube-api-access-wz2cb\") pod \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.991303 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-utilities\") pod \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.991385 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-catalog-content\") pod \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\" (UID: \"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa\") " Jan 31 05:42:48 crc kubenswrapper[4712]: I0131 05:42:48.992401 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-utilities" (OuterVolumeSpecName: "utilities") pod "e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" (UID: "e3a3eb30-cb5f-473d-86dc-da8ddb6275fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:48.998351 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-kube-api-access-wz2cb" (OuterVolumeSpecName: "kube-api-access-wz2cb") pod "e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" (UID: "e3a3eb30-cb5f-473d-86dc-da8ddb6275fa"). InnerVolumeSpecName "kube-api-access-wz2cb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.013418 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" (UID: "e3a3eb30-cb5f-473d-86dc-da8ddb6275fa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.093710 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.093749 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.093765 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz2cb\" (UniqueName: \"kubernetes.io/projected/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa-kube-api-access-wz2cb\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.628198 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtnz9" event={"ID":"e3a3eb30-cb5f-473d-86dc-da8ddb6275fa","Type":"ContainerDied","Data":"91e8d3820db669feee54aef758cfbbe8d47c570efefb968a1c95fd136e463826"} Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.628260 4712 scope.go:117] "RemoveContainer" containerID="b7cde776c0a52263d83ab75b207066a9a8311910c011efcb256f187064a2096f" Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.628287 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtnz9" Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.656764 4712 scope.go:117] "RemoveContainer" containerID="eaff49fa8ab66379509afb9a74fafcf37ef62edf6d92469b106970e34b72276d" Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.666366 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtnz9"] Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.669396 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtnz9"] Jan 31 05:42:49 crc kubenswrapper[4712]: I0131 05:42:49.685782 4712 scope.go:117] "RemoveContainer" containerID="4ca0c14647f2268ded7843aa7a95a6d0e7f044b893dbcaf86983fcaf3d934833" Jan 31 05:42:50 crc kubenswrapper[4712]: I0131 05:42:50.519421 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" path="/var/lib/kubelet/pods/e3a3eb30-cb5f-473d-86dc-da8ddb6275fa/volumes" Jan 31 05:42:50 crc kubenswrapper[4712]: I0131 05:42:50.636046 4712 generic.go:334] "Generic (PLEG): container finished" podID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerID="38907b2a23191589b2c2f762e6ec3f961f33d1cd2e637c382adac3adb075f905" exitCode=0 Jan 31 05:42:50 crc kubenswrapper[4712]: I0131 05:42:50.636131 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8z9q4" event={"ID":"2b20baa4-578c-4f78-ba6b-27b05d32ab85","Type":"ContainerDied","Data":"38907b2a23191589b2c2f762e6ec3f961f33d1cd2e637c382adac3adb075f905"} Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.477396 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.637727 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-utilities" (OuterVolumeSpecName: "utilities") pod "2b20baa4-578c-4f78-ba6b-27b05d32ab85" (UID: "2b20baa4-578c-4f78-ba6b-27b05d32ab85"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.636333 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-utilities\") pod \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.638162 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcfkb\" (UniqueName: \"kubernetes.io/projected/2b20baa4-578c-4f78-ba6b-27b05d32ab85-kube-api-access-qcfkb\") pod \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.640420 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-catalog-content\") pod \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\" (UID: \"2b20baa4-578c-4f78-ba6b-27b05d32ab85\") " Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.641213 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.649773 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b20baa4-578c-4f78-ba6b-27b05d32ab85-kube-api-access-qcfkb" (OuterVolumeSpecName: "kube-api-access-qcfkb") pod "2b20baa4-578c-4f78-ba6b-27b05d32ab85" (UID: "2b20baa4-578c-4f78-ba6b-27b05d32ab85"). InnerVolumeSpecName "kube-api-access-qcfkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.651428 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8z9q4" event={"ID":"2b20baa4-578c-4f78-ba6b-27b05d32ab85","Type":"ContainerDied","Data":"9561a4ce9786c939310b14156c993ba1af314dc138aaa547510ead9a435c2efb"} Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.651487 4712 scope.go:117] "RemoveContainer" containerID="38907b2a23191589b2c2f762e6ec3f961f33d1cd2e637c382adac3adb075f905" Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.651626 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8z9q4" Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.742523 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcfkb\" (UniqueName: \"kubernetes.io/projected/2b20baa4-578c-4f78-ba6b-27b05d32ab85-kube-api-access-qcfkb\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.809408 4712 scope.go:117] "RemoveContainer" containerID="4c5e51808d0b342fe6f6cdb6e578af9b5bfaba273e60f105634e6e591ad15c0e" Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.828629 4712 scope.go:117] "RemoveContainer" containerID="5b72f995449e9417ba09eb00914464f22ba85a18f5ebf3eaa3892235d9e9945a" Jan 31 05:42:51 crc kubenswrapper[4712]: I0131 05:42:51.988638 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b20baa4-578c-4f78-ba6b-27b05d32ab85" (UID: "2b20baa4-578c-4f78-ba6b-27b05d32ab85"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:42:52 crc kubenswrapper[4712]: I0131 05:42:52.047680 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b20baa4-578c-4f78-ba6b-27b05d32ab85-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:52 crc kubenswrapper[4712]: I0131 05:42:52.291085 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8z9q4"] Jan 31 05:42:52 crc kubenswrapper[4712]: I0131 05:42:52.293903 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8z9q4"] Jan 31 05:42:52 crc kubenswrapper[4712]: I0131 05:42:52.297999 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:42:52 crc kubenswrapper[4712]: I0131 05:42:52.353522 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:42:52 crc kubenswrapper[4712]: I0131 05:42:52.511532 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" path="/var/lib/kubelet/pods/2b20baa4-578c-4f78-ba6b-27b05d32ab85/volumes" Jan 31 05:42:52 crc kubenswrapper[4712]: I0131 05:42:52.580293 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.029329 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4mdhz"] Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.029998 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4mdhz" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerName="registry-server" containerID="cri-o://f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a" gracePeriod=2 Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.478705 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.611937 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncfzm\" (UniqueName: \"kubernetes.io/projected/2c321fbf-244e-403c-a1c0-18b136eb9995-kube-api-access-ncfzm\") pod \"2c321fbf-244e-403c-a1c0-18b136eb9995\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.611993 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-utilities\") pod \"2c321fbf-244e-403c-a1c0-18b136eb9995\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.612074 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-catalog-content\") pod \"2c321fbf-244e-403c-a1c0-18b136eb9995\" (UID: \"2c321fbf-244e-403c-a1c0-18b136eb9995\") " Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.613323 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-utilities" (OuterVolumeSpecName: "utilities") pod "2c321fbf-244e-403c-a1c0-18b136eb9995" (UID: "2c321fbf-244e-403c-a1c0-18b136eb9995"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.619418 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c321fbf-244e-403c-a1c0-18b136eb9995-kube-api-access-ncfzm" (OuterVolumeSpecName: "kube-api-access-ncfzm") pod "2c321fbf-244e-403c-a1c0-18b136eb9995" (UID: "2c321fbf-244e-403c-a1c0-18b136eb9995"). InnerVolumeSpecName "kube-api-access-ncfzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.670608 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c321fbf-244e-403c-a1c0-18b136eb9995" (UID: "2c321fbf-244e-403c-a1c0-18b136eb9995"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.687806 4712 generic.go:334] "Generic (PLEG): container finished" podID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerID="f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a" exitCode=0 Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.687852 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mdhz" event={"ID":"2c321fbf-244e-403c-a1c0-18b136eb9995","Type":"ContainerDied","Data":"f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a"} Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.687926 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mdhz" event={"ID":"2c321fbf-244e-403c-a1c0-18b136eb9995","Type":"ContainerDied","Data":"54559a754a9bab540d3e8ce1074cea7ecf53ba932f551af13be10d67fb951d7f"} Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.687955 4712 scope.go:117] "RemoveContainer" containerID="f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.688123 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4mdhz" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.714821 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.714863 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncfzm\" (UniqueName: \"kubernetes.io/projected/2c321fbf-244e-403c-a1c0-18b136eb9995-kube-api-access-ncfzm\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.714879 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c321fbf-244e-403c-a1c0-18b136eb9995-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.716948 4712 scope.go:117] "RemoveContainer" containerID="315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.730625 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4mdhz"] Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.743121 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4mdhz"] Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.746939 4712 scope.go:117] "RemoveContainer" containerID="2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.770740 4712 scope.go:117] "RemoveContainer" containerID="f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a" Jan 31 05:42:56 crc kubenswrapper[4712]: E0131 05:42:56.771104 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a\": container with ID starting with f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a not found: ID does not exist" containerID="f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.771135 
4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a"} err="failed to get container status \"f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a\": rpc error: code = NotFound desc = could not find container \"f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a\": container with ID starting with f9d81066f5afe15d479462534fa18221bd4643e994d1f5b495e4471b07312f7a not found: ID does not exist" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.771157 4712 scope.go:117] "RemoveContainer" containerID="315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97" Jan 31 05:42:56 crc kubenswrapper[4712]: E0131 05:42:56.771520 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97\": container with ID starting with 315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97 not found: ID does not exist" containerID="315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.771542 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97"} err="failed to get container status \"315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97\": rpc error: code = NotFound desc = could not find container \"315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97\": container with ID starting with 315536f60e531c22db03752d1db1fd95c2b97546f2d0f5258e6a1636d604bd97 not found: ID does not exist" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.771555 4712 scope.go:117] "RemoveContainer" containerID="2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d" Jan 31 05:42:56 crc kubenswrapper[4712]: E0131 05:42:56.771741 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d\": container with ID starting with 2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d not found: ID does not exist" containerID="2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d" Jan 31 05:42:56 crc kubenswrapper[4712]: I0131 05:42:56.771761 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d"} err="failed to get container status \"2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d\": rpc error: code = NotFound desc = could not find container \"2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d\": container with ID starting with 2bd6e9c0bc2c0b92cdabf6f9a88b93d18e454bd93c6fc845d1f383848f52419d not found: ID does not exist" Jan 31 05:42:58 crc kubenswrapper[4712]: I0131 05:42:58.516350 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" path="/var/lib/kubelet/pods/2c321fbf-244e-403c-a1c0-18b136eb9995/volumes" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.086453 4712 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087454 4712 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087483 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087515 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerName="extract-content" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087527 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerName="extract-content" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087552 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerName="extract-utilities" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087568 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerName="extract-utilities" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087590 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerName="extract-utilities" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087601 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerName="extract-utilities" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087637 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerName="extract-utilities" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087650 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerName="extract-utilities" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087683 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087695 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087721 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerName="extract-content" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087789 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerName="extract-content" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087818 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b70821b-0bd2-414f-ab4b-80dc8f5883a3" containerName="pruner" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087832 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b70821b-0bd2-414f-ab4b-80dc8f5883a3" containerName="pruner" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087848 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerName="extract-content" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087859 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerName="extract-content" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087899 4712 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087913 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087941 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53026338-09b1-4e67-bc82-ab70f6737607" containerName="pruner" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087953 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="53026338-09b1-4e67-bc82-ab70f6737607" containerName="pruner" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.087979 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerName="extract-utilities" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.087990 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerName="extract-utilities" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.088017 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerName="extract-content" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.088029 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerName="extract-content" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.088054 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.088067 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.088446 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="53026338-09b1-4e67-bc82-ab70f6737607" containerName="pruner" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.088487 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c321fbf-244e-403c-a1c0-18b136eb9995" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.088518 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b70821b-0bd2-414f-ab4b-80dc8f5883a3" containerName="pruner" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.088544 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3a3eb30-cb5f-473d-86dc-da8ddb6275fa" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.088560 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fdbdcd7-a42e-418f-aa9e-81599576fac5" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.088589 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b20baa4-578c-4f78-ba6b-27b05d32ab85" containerName="registry-server" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.089418 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.093247 4712 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.093783 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf" gracePeriod=15 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.094076 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077" gracePeriod=15 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.094233 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9" gracePeriod=15 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.094243 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b" gracePeriod=15 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.094285 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae" gracePeriod=15 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.097745 4712 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.098115 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098141 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.098154 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098164 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.098202 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098214 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 31 05:43:01 crc 
kubenswrapper[4712]: E0131 05:43:01.098234 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098245 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.098264 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098276 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.098478 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098486 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.098499 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098507 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.098518 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098527 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098688 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098701 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098710 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098724 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098735 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098745 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.098754 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 31 
05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.151658 4712 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.38:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.174911 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.174960 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.174990 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.175019 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.175043 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.175079 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.175100 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.175134 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.276839 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.276890 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.276913 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.276939 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.276960 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.276985 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.276984 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.277035 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.277000 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc 
kubenswrapper[4712]: I0131 05:43:01.277077 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.277077 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.277112 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.277246 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.277276 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.277581 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.277686 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.453445 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:01 crc kubenswrapper[4712]: W0131 05:43:01.478757 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-3051d98fa8f70fcbc8ed8cdb8fb41a30a41820b1fcfa977ab7fbd5cebf1a6169 WatchSource:0}: Error finding container 3051d98fa8f70fcbc8ed8cdb8fb41a30a41820b1fcfa977ab7fbd5cebf1a6169: Status 404 returned error can't find the container with id 3051d98fa8f70fcbc8ed8cdb8fb41a30a41820b1fcfa977ab7fbd5cebf1a6169 Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.484480 4712 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.38:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188fba72f54326fe openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-31 05:43:01.483751166 +0000 UTC m=+247.577633037,LastTimestamp:2026-01-31 05:43:01.483751166 +0000 UTC m=+247.577633037,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.583242 4712 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.584559 4712 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.585033 4712 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.585832 4712 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.586641 4712 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.586687 4712 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 
05:43:01.587073 4712 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="200ms" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.737204 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"3051d98fa8f70fcbc8ed8cdb8fb41a30a41820b1fcfa977ab7fbd5cebf1a6169"} Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.739865 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.743014 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.743913 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077" exitCode=0 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.743959 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae" exitCode=0 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.743974 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b" exitCode=0 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.743988 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9" exitCode=2 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.744041 4712 scope.go:117] "RemoveContainer" containerID="ad30603f3e6670a768a6c6774cc2a4bd81ddbd43a4ac1e0c113de7bf4584d312" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.746319 4712 generic.go:334] "Generic (PLEG): container finished" podID="52a81c91-0d8f-4691-a5b2-106bf4378643" containerID="0436d045912697bc84d49fef8f18eccd9afc352473b146d14d54715e725c6f08" exitCode=0 Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.746364 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"52a81c91-0d8f-4691-a5b2-106bf4378643","Type":"ContainerDied","Data":"0436d045912697bc84d49fef8f18eccd9afc352473b146d14d54715e725c6f08"} Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.747145 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:01 crc kubenswrapper[4712]: I0131 05:43:01.747735 4712 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:01 crc kubenswrapper[4712]: E0131 05:43:01.788859 4712 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="400ms" Jan 31 05:43:02 crc kubenswrapper[4712]: E0131 05:43:02.189820 4712 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="800ms" Jan 31 05:43:02 crc kubenswrapper[4712]: E0131 05:43:02.517308 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:43:02Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:43:02Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:43:02Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:43:02Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:9bde862635f230b66b73aad05940f6cf2c0555a47fe1db330a20724acca8d497\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:db103f9b4d410efdd30da231ffebe8f093377e6c1e4064ddc68046925eb4627f\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1680805611},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:63fbea3b7080a0b403eaf16b3fed3ceda4cbba1fb0d71797d201d97e0745475c\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:eecad2fc166355255907130f5b4a16ed876f792fe4420ae700dbc3741c3a382e\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1202122991},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:514f0e3792045ec881b86653f42479597c72556731c6ca2e4c2dabbc1549aee3\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:a7f46ed7189598ac1c08c27d80acf72924f91cc58cb0dde5550553c7618a400f\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1188927217},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:420326d8488ceff2cde22ad8b85d739b0c254d47e703f7ddb1f08f77a48816a6\\\",\\\"registry.redhat.i
o/redhat/redhat-marketplace-index@sha256:54817da328fa589491a3acbe80acdd88c0830dcc63aaafc08c3539925a1a3b03\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1180692192},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256
:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:02 crc kubenswrapper[4712]: E0131 05:43:02.518269 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:02 crc kubenswrapper[4712]: E0131 05:43:02.519199 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:02 crc kubenswrapper[4712]: E0131 05:43:02.519849 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:02 crc kubenswrapper[4712]: E0131 05:43:02.521322 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:02 crc kubenswrapper[4712]: E0131 05:43:02.521362 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:43:02 crc kubenswrapper[4712]: I0131 05:43:02.763111 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a"} Jan 31 05:43:02 crc kubenswrapper[4712]: I0131 05:43:02.764372 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:02 crc kubenswrapper[4712]: E0131 05:43:02.764509 4712 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.38:6443: connect: connection refused" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:02 crc kubenswrapper[4712]: I0131 05:43:02.767982 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 31 05:43:02 crc kubenswrapper[4712]: E0131 05:43:02.994724 4712 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="1.6s" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.038107 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.038705 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.213726 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52a81c91-0d8f-4691-a5b2-106bf4378643-kube-api-access\") pod \"52a81c91-0d8f-4691-a5b2-106bf4378643\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.216415 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-var-lock\") pod \"52a81c91-0d8f-4691-a5b2-106bf4378643\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.216689 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-kubelet-dir\") pod \"52a81c91-0d8f-4691-a5b2-106bf4378643\" (UID: \"52a81c91-0d8f-4691-a5b2-106bf4378643\") " Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.216580 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-var-lock" (OuterVolumeSpecName: "var-lock") pod "52a81c91-0d8f-4691-a5b2-106bf4378643" (UID: "52a81c91-0d8f-4691-a5b2-106bf4378643"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.218118 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "52a81c91-0d8f-4691-a5b2-106bf4378643" (UID: "52a81c91-0d8f-4691-a5b2-106bf4378643"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.233409 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52a81c91-0d8f-4691-a5b2-106bf4378643-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "52a81c91-0d8f-4691-a5b2-106bf4378643" (UID: "52a81c91-0d8f-4691-a5b2-106bf4378643"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.323055 4712 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-var-lock\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.323088 4712 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52a81c91-0d8f-4691-a5b2-106bf4378643-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.323098 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52a81c91-0d8f-4691-a5b2-106bf4378643-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.547021 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.547725 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.548257 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.548536 4712 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.727244 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.727538 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.727629 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.727340 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.727561 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.727852 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.728135 4712 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.728240 4712 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.728330 4712 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.776009 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"52a81c91-0d8f-4691-a5b2-106bf4378643","Type":"ContainerDied","Data":"94306dfd30058644554903ff7edc36402242d5d9948d905bd0fb38c7dae924bd"} Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.776050 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94306dfd30058644554903ff7edc36402242d5d9948d905bd0fb38c7dae924bd" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.776789 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.778922 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.779949 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf" exitCode=0 Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.780034 4712 scope.go:117] "RemoveContainer" containerID="7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.780138 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:03 crc kubenswrapper[4712]: E0131 05:43:03.780839 4712 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.38:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.795772 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.796458 4712 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.799904 4712 scope.go:117] "RemoveContainer" containerID="c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.804466 4712 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.804667 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.814871 4712 scope.go:117] "RemoveContainer" containerID="85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.828854 4712 scope.go:117] "RemoveContainer" containerID="93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.850566 4712 scope.go:117] "RemoveContainer" containerID="a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.870588 4712 scope.go:117] "RemoveContainer" containerID="c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.887302 4712 scope.go:117] "RemoveContainer" containerID="7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077" Jan 31 05:43:03 crc kubenswrapper[4712]: E0131 05:43:03.887597 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\": container with ID starting with 7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077 not found: ID does not exist" containerID="7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.887697 
4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077"} err="failed to get container status \"7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\": rpc error: code = NotFound desc = could not find container \"7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077\": container with ID starting with 7bc3e71bfe4344c6772a70948cd770e4ff177c24a11bdd85e0027064f8a99077 not found: ID does not exist" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.887801 4712 scope.go:117] "RemoveContainer" containerID="c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae" Jan 31 05:43:03 crc kubenswrapper[4712]: E0131 05:43:03.888125 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\": container with ID starting with c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae not found: ID does not exist" containerID="c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.888193 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae"} err="failed to get container status \"c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\": rpc error: code = NotFound desc = could not find container \"c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae\": container with ID starting with c3331bd8118d6d88d8620214946fc8742a601b31611e4fcffedafce5942415ae not found: ID does not exist" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.888217 4712 scope.go:117] "RemoveContainer" containerID="85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b" Jan 31 05:43:03 crc kubenswrapper[4712]: E0131 05:43:03.888409 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\": container with ID starting with 85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b not found: ID does not exist" containerID="85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.888505 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b"} err="failed to get container status \"85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\": rpc error: code = NotFound desc = could not find container \"85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b\": container with ID starting with 85ecf6c20a87c8ac25f3293cdf567a6c2fd17929776baab9165af76a0043449b not found: ID does not exist" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.888573 4712 scope.go:117] "RemoveContainer" containerID="93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9" Jan 31 05:43:03 crc kubenswrapper[4712]: E0131 05:43:03.888834 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\": container with ID starting with 93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9 not found: ID 
does not exist" containerID="93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.888857 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9"} err="failed to get container status \"93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\": rpc error: code = NotFound desc = could not find container \"93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9\": container with ID starting with 93d10ff11682fedec8b13b6bf69e4f7f714ded0d512296abf2f28844d80e16b9 not found: ID does not exist" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.888871 4712 scope.go:117] "RemoveContainer" containerID="a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf" Jan 31 05:43:03 crc kubenswrapper[4712]: E0131 05:43:03.889083 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\": container with ID starting with a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf not found: ID does not exist" containerID="a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.889187 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf"} err="failed to get container status \"a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\": rpc error: code = NotFound desc = could not find container \"a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf\": container with ID starting with a43b60d33e3eb718285282f5c9fb75e733fe4ed0003ae4fd5e2310be813a4fdf not found: ID does not exist" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.889273 4712 scope.go:117] "RemoveContainer" containerID="c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7" Jan 31 05:43:03 crc kubenswrapper[4712]: E0131 05:43:03.889557 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\": container with ID starting with c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7 not found: ID does not exist" containerID="c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7" Jan 31 05:43:03 crc kubenswrapper[4712]: I0131 05:43:03.889584 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7"} err="failed to get container status \"c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\": rpc error: code = NotFound desc = could not find container \"c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7\": container with ID starting with c0cbb7ae6c332c4e44baa17dc707731da60653ed4a16c04dc4fc2df6b47fd9c7 not found: ID does not exist" Jan 31 05:43:04 crc kubenswrapper[4712]: I0131 05:43:04.512015 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 
05:43:04 crc kubenswrapper[4712]: I0131 05:43:04.512309 4712 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:04 crc kubenswrapper[4712]: I0131 05:43:04.523478 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 31 05:43:04 crc kubenswrapper[4712]: E0131 05:43:04.596247 4712 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="3.2s" Jan 31 05:43:05 crc kubenswrapper[4712]: E0131 05:43:05.773590 4712 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.38:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188fba72f54326fe openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-31 05:43:01.483751166 +0000 UTC m=+247.577633037,LastTimestamp:2026-01-31 05:43:01.483751166 +0000 UTC m=+247.577633037,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.168759 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" containerName="oauth-openshift" containerID="cri-o://b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55" gracePeriod=15 Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.630525 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.630925 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.631110 4712 status_manager.go:851] "Failed to get status for pod" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zsktt\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785343 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-provider-selection\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785451 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-service-ca\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785522 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-ocp-branding-template\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785593 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-trusted-ca-bundle\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785641 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-cliconfig\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785691 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmmzx\" (UniqueName: \"kubernetes.io/projected/4a308f02-3cb6-4226-85a1-4a82e2289551-kube-api-access-dmmzx\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785738 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-serving-cert\") pod 
\"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785789 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-dir\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785836 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-error\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785900 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-login\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785940 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-policies\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.785999 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-idp-0-file-data\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.786046 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-session\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.786101 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-router-certs\") pod \"4a308f02-3cb6-4226-85a1-4a82e2289551\" (UID: \"4a308f02-3cb6-4226-85a1-4a82e2289551\") " Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.786118 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.786336 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.786648 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.786696 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.787796 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.788540 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.789119 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.796002 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.796768 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.797084 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.797265 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: E0131 05:43:07.797555 4712 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="6.4s" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.798120 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a308f02-3cb6-4226-85a1-4a82e2289551-kube-api-access-dmmzx" (OuterVolumeSpecName: "kube-api-access-dmmzx") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "kube-api-access-dmmzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.798559 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.798821 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.799006 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.799458 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "4a308f02-3cb6-4226-85a1-4a82e2289551" (UID: "4a308f02-3cb6-4226-85a1-4a82e2289551"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.815250 4712 generic.go:334] "Generic (PLEG): container finished" podID="4a308f02-3cb6-4226-85a1-4a82e2289551" containerID="b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55" exitCode=0 Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.815337 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" event={"ID":"4a308f02-3cb6-4226-85a1-4a82e2289551","Type":"ContainerDied","Data":"b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55"} Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.815410 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" event={"ID":"4a308f02-3cb6-4226-85a1-4a82e2289551","Type":"ContainerDied","Data":"87a397305d75cbf0f1178eefc7216902a614ef49c256de770c394ecfb8029626"} Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.815453 4712 scope.go:117] "RemoveContainer" containerID="b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.815955 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.817731 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.818516 4712 status_manager.go:851] "Failed to get status for pod" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zsktt\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.870904 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.871837 4712 status_manager.go:851] "Failed to get status for pod" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zsktt\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.885732 4712 scope.go:117] "RemoveContainer" 
containerID="b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55" Jan 31 05:43:07 crc kubenswrapper[4712]: E0131 05:43:07.887364 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55\": container with ID starting with b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55 not found: ID does not exist" containerID="b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.887413 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55"} err="failed to get container status \"b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55\": rpc error: code = NotFound desc = could not find container \"b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55\": container with ID starting with b1e702be4f4495f0b066b4a4ef1827d9d47ba735a672ac380803838d88b77a55 not found: ID does not exist" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.888927 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.888983 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.889011 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.889033 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.889052 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.889075 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmmzx\" (UniqueName: \"kubernetes.io/projected/4a308f02-3cb6-4226-85a1-4a82e2289551-kube-api-access-dmmzx\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.889093 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.889113 4712 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 
Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.889157 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.889211 4712 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a308f02-3cb6-4226-85a1-4a82e2289551-audit-policies\") on node \"crc\" DevicePath \"\""
Jan 31 05:43:07 crc kubenswrapper[4712]: I0131 05:43:07.889231 4712 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4a308f02-3cb6-4226-85a1-4a82e2289551-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Jan 31 05:43:10 crc kubenswrapper[4712]: E0131 05:43:10.542046 4712 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.38:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" volumeName="registry-storage"
Jan 31 05:43:11 crc kubenswrapper[4712]: I0131 05:43:11.504711 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 31 05:43:11 crc kubenswrapper[4712]: I0131 05:43:11.506063 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Jan 31 05:43:11 crc kubenswrapper[4712]: I0131 05:43:11.506660 4712 status_manager.go:851] "Failed to get status for pod" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zsktt\": dial tcp 38.102.83.38:6443: connect: connection refused"
Jan 31 05:43:11 crc kubenswrapper[4712]: I0131 05:43:11.531067 4712 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160"
Jan 31 05:43:11 crc kubenswrapper[4712]: I0131 05:43:11.531119 4712 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160"
Jan 31 05:43:11 crc kubenswrapper[4712]: E0131 05:43:11.531741 4712 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 31 05:43:11 crc kubenswrapper[4712]: I0131 05:43:11.532479 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:11 crc kubenswrapper[4712]: W0131 05:43:11.569846 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-0ece1c29d11a28d2e8dcd3fa63c07862a9b55d507999fa15cecdc47eb359e876 WatchSource:0}: Error finding container 0ece1c29d11a28d2e8dcd3fa63c07862a9b55d507999fa15cecdc47eb359e876: Status 404 returned error can't find the container with id 0ece1c29d11a28d2e8dcd3fa63c07862a9b55d507999fa15cecdc47eb359e876 Jan 31 05:43:11 crc kubenswrapper[4712]: I0131 05:43:11.850580 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0ece1c29d11a28d2e8dcd3fa63c07862a9b55d507999fa15cecdc47eb359e876"} Jan 31 05:43:12 crc kubenswrapper[4712]: I0131 05:43:12.863286 4712 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="6cb7024637b55c10dda8787548000d798cf6e337aba4e7c44107993843ac145f" exitCode=0 Jan 31 05:43:12 crc kubenswrapper[4712]: I0131 05:43:12.863398 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"6cb7024637b55c10dda8787548000d798cf6e337aba4e7c44107993843ac145f"} Jan 31 05:43:12 crc kubenswrapper[4712]: I0131 05:43:12.864006 4712 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160" Jan 31 05:43:12 crc kubenswrapper[4712]: I0131 05:43:12.864049 4712 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160" Jan 31 05:43:12 crc kubenswrapper[4712]: I0131 05:43:12.865193 4712 status_manager.go:851] "Failed to get status for pod" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:12 crc kubenswrapper[4712]: E0131 05:43:12.865244 4712 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:12 crc kubenswrapper[4712]: I0131 05:43:12.865533 4712 status_manager.go:851] "Failed to get status for pod" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" pod="openshift-authentication/oauth-openshift-558db77b4-zsktt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zsktt\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:12 crc kubenswrapper[4712]: E0131 05:43:12.901015 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:43:12Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:43:12Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:43:12Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-31T05:43:12Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:9bde862635f230b66b73aad05940f6cf2c0555a47fe1db330a20724acca8d497\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:db103f9b4d410efdd30da231ffebe8f093377e6c1e4064ddc68046925eb4627f\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1680805611},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:63fbea3b7080a0b403eaf16b3fed3ceda4cbba1fb0d71797d201d97e0745475c\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:eecad2fc166355255907130f5b4a16ed876f792fe4420ae700dbc3741c3a382e\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1202122991},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:514f0e3792045ec881b86653f42479597c72556731c6ca2e4c2dabbc1549aee3\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:a7f46ed7189598ac1c08c27d80acf72924f91cc58cb0dde5550553c7618a400f\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1188927217},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:420326d8488ceff2cde22ad8b85d739b0c254d47e703f7ddb1f08f77a48816a6\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:54817da328fa589491a3acbe80acdd88c0830dcc63aaafc08c3539925a1a3b03\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1180692192},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\
\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:12 crc kubenswrapper[4712]: E0131 05:43:12.901642 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:12 crc kubenswrapper[4712]: E0131 05:43:12.902217 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:12 crc kubenswrapper[4712]: E0131 05:43:12.902404 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:12 crc kubenswrapper[4712]: E0131 05:43:12.902555 4712 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" Jan 31 05:43:12 crc kubenswrapper[4712]: E0131 05:43:12.902567 4712 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 31 05:43:13 crc kubenswrapper[4712]: I0131 05:43:13.875963 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f4184dd7ed14eeb5e7aede1923b15e5b5ba5a833f3802117a71c6ddfbb406a20"} Jan 31 05:43:13 crc kubenswrapper[4712]: I0131 05:43:13.876707 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"00f97cec352a2fb3bbdb527c16649a4245c238cd3829e9bd1ad860b993811542"} Jan 31 05:43:13 crc kubenswrapper[4712]: I0131 05:43:13.876722 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4b478e74507d5524dccd6f1fa20822b793c317a03cf9918a381196405c803744"} Jan 31 05:43:14 crc kubenswrapper[4712]: I0131 05:43:14.887716 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 31 05:43:14 crc kubenswrapper[4712]: I0131 05:43:14.887783 4712 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad" exitCode=1 Jan 31 05:43:14 crc kubenswrapper[4712]: I0131 05:43:14.887875 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad"} Jan 31 05:43:14 crc kubenswrapper[4712]: I0131 05:43:14.888444 4712 scope.go:117] "RemoveContainer" containerID="093193e95902d9453fdec0d41831ab2066384f30716dcf7bf671e110a327b7ad" Jan 31 05:43:14 crc kubenswrapper[4712]: I0131 05:43:14.892378 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"96632d919ac8605e7cc06d716aa330adde3cd27f019f7880df02dec2bea5f6ce"} Jan 31 05:43:14 crc kubenswrapper[4712]: I0131 05:43:14.892428 4712 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"af95fb390c7870da22663d0c124224f6366f9a790e87d7a6e660ba4037bc3d83"} Jan 31 05:43:14 crc kubenswrapper[4712]: I0131 05:43:14.892676 4712 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160" Jan 31 05:43:14 crc kubenswrapper[4712]: I0131 05:43:14.892696 4712 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160" Jan 31 05:43:14 crc kubenswrapper[4712]: I0131 05:43:14.892818 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:15 crc kubenswrapper[4712]: I0131 05:43:15.358838 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 31 05:43:15 crc kubenswrapper[4712]: I0131 05:43:15.902463 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 31 05:43:15 crc kubenswrapper[4712]: I0131 05:43:15.902876 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9b38c6a1b2b7d0c97008193f145726f5b41308e40d763d996a1015179ca740fa"} Jan 31 05:43:16 crc kubenswrapper[4712]: I0131 05:43:16.533050 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:16 crc kubenswrapper[4712]: I0131 05:43:16.533109 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:16 crc kubenswrapper[4712]: I0131 05:43:16.539373 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:17 crc kubenswrapper[4712]: I0131 05:43:17.192485 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 31 05:43:19 crc kubenswrapper[4712]: I0131 05:43:19.589236 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 31 05:43:19 crc kubenswrapper[4712]: I0131 05:43:19.597240 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 31 05:43:19 crc kubenswrapper[4712]: I0131 05:43:19.902503 4712 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:19 crc kubenswrapper[4712]: I0131 05:43:19.926668 4712 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160" Jan 31 05:43:19 crc kubenswrapper[4712]: I0131 05:43:19.926702 4712 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160" Jan 31 05:43:19 crc kubenswrapper[4712]: I0131 05:43:19.930463 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 31 05:43:19 crc kubenswrapper[4712]: I0131 05:43:19.933004 4712 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="0078e55c-5a52-4299-95d9-7afba1f378e4" Jan 31 05:43:20 crc kubenswrapper[4712]: I0131 05:43:20.931997 4712 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160" Jan 31 05:43:20 crc kubenswrapper[4712]: I0131 05:43:20.932043 4712 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ac27bb4c-1d85-4ede-88eb-ced27b73d160" Jan 31 05:43:24 crc kubenswrapper[4712]: I0131 05:43:24.524438 4712 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="0078e55c-5a52-4299-95d9-7afba1f378e4" Jan 31 05:43:26 crc kubenswrapper[4712]: I0131 05:43:26.200341 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 31 05:43:26 crc kubenswrapper[4712]: I0131 05:43:26.270165 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 31 05:43:26 crc kubenswrapper[4712]: I0131 05:43:26.845408 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 31 05:43:27 crc kubenswrapper[4712]: I0131 05:43:27.197072 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 31 05:43:27 crc kubenswrapper[4712]: I0131 05:43:27.387111 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 31 05:43:29 crc kubenswrapper[4712]: I0131 05:43:29.356873 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 31 05:43:29 crc kubenswrapper[4712]: I0131 05:43:29.430244 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 31 05:43:30 crc kubenswrapper[4712]: I0131 05:43:30.843518 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 31 05:43:31 crc kubenswrapper[4712]: I0131 05:43:31.244850 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 31 05:43:31 crc kubenswrapper[4712]: I0131 05:43:31.596164 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 31 05:43:31 crc kubenswrapper[4712]: I0131 05:43:31.874317 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 31 05:43:31 crc kubenswrapper[4712]: I0131 05:43:31.894855 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 31 05:43:32 crc kubenswrapper[4712]: I0131 05:43:32.105947 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 31 05:43:32 crc kubenswrapper[4712]: I0131 05:43:32.140759 4712 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 31 05:43:32 crc kubenswrapper[4712]: I0131 05:43:32.392577 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 31 05:43:32 crc kubenswrapper[4712]: I0131 05:43:32.664929 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 31 05:43:33 crc kubenswrapper[4712]: I0131 05:43:33.720397 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.011334 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.062383 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.098989 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.345233 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.390760 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.445229 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.508437 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.509551 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.804291 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 31 05:43:34 crc kubenswrapper[4712]: I0131 05:43:34.916905 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.034071 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.203866 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.239606 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.297113 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.366976 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 
Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.482936 4712 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.601020 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.609225 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.718549 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.740226 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.769020 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.818088 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.824224 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 31 05:43:35 crc kubenswrapper[4712]: I0131 05:43:35.954080 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.033470 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.084709 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.092307 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.228615 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.246135 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.311610 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.398271 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.400456 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.414005 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.415136 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.469536 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.512345 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.611393 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.680353 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.720495 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 31 05:43:36 crc kubenswrapper[4712]: I0131 05:43:36.826005 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.025085 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.052009 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.193311 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.199811 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.202235 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.206110 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.242297 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.298933 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.336104 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.393457 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.494342 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.582104 4712 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.588754 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.704627 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.712381 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.716503 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.721446 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.823619 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.846017 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.858054 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.860321 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.898586 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 31 05:43:37 crc kubenswrapper[4712]: I0131 05:43:37.907275 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.006495 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.254116 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.380225 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.469626 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.575604 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.587784 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.644715 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.689137 4712 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.769078 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.780130 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.799295 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.802231 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.970622 4712 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 31 05:43:38 crc kubenswrapper[4712]: I0131 05:43:38.974347 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.184601 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.234148 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.234678 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.302236 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.346312 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.354140 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.363023 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.455331 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.560846 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.633676 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.699288 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.707828 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.826600 4712 reflector.go:368] 
Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.851603 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.900367 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.909350 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.912817 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.917845 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.962199 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 31 05:43:39 crc kubenswrapper[4712]: I0131 05:43:39.974383 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.010255 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.021961 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.085272 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.117499 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.133422 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.202614 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.328298 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.334611 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.357781 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.377627 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.491628 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.557265 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.589449 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.603623 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.671950 4712 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.672670 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.677920 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-zsktt"]
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.677989 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.678916 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.681523 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.684122 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.688789 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.710705 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=21.710686102 podStartE2EDuration="21.710686102s" podCreationTimestamp="2026-01-31 05:43:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:43:40.706264968 +0000 UTC m=+286.800146809" watchObservedRunningTime="2026-01-31 05:43:40.710686102 +0000 UTC m=+286.804567943"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.762101 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.764501 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.797922 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.807397 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.814005 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
object-"openshift-apiserver"/"kube-root-ca.crt" Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.898044 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.911447 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.955110 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 31 05:43:40 crc kubenswrapper[4712]: I0131 05:43:40.988979 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.037288 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.043873 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.070333 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.087713 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.088648 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.137493 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.195626 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.230349 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.489403 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.499193 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.611501 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.683816 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.785186 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.980058 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 31 05:43:41 crc kubenswrapper[4712]: I0131 05:43:41.981697 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 31 05:43:41 crc 
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.044575 4712 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.112943 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.173615 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.202194 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.327852 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.375049 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.416146 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.449646 4712 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.450286 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a" gracePeriod=5
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.511091 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" path="/var/lib/kubelet/pods/4a308f02-3cb6-4226-85a1-4a82e2289551/volumes"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.512621 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.531410 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.619920 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.627538 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.633648 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.806501 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.832461 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn"]
Jan 31 05:43:42 crc kubenswrapper[4712]: E0131 05:43:42.832712 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" containerName="oauth-openshift"
Jan 31 05:43:42 crc kubenswrapper[4712]: E0131 05:43:42.832712 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" containerName="oauth-openshift"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.832728 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" containerName="oauth-openshift"
Jan 31 05:43:42 crc kubenswrapper[4712]: E0131 05:43:42.832747 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.832755 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Jan 31 05:43:42 crc kubenswrapper[4712]: E0131 05:43:42.832774 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" containerName="installer"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.832783 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" containerName="installer"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.832924 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a308f02-3cb6-4226-85a1-4a82e2289551" containerName="oauth-openshift"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.832947 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="52a81c91-0d8f-4691-a5b2-106bf4378643" containerName="installer"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.832963 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.833503 4712 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.836830 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.837445 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.837495 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.837505 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.838278 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.838406 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.838528 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.838666 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.845505 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.845509 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.845668 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.848136 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.855338 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.861760 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.870190 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941380 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/444823c1-da17-40cc-821f-ff9d821701d3-audit-dir\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941434 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-session\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941473 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-audit-policies\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941502 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941630 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941698 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-template-error\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941753 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941789 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941807 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc 
kubenswrapper[4712]: I0131 05:43:42.941835 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941865 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btfsj\" (UniqueName: \"kubernetes.io/projected/444823c1-da17-40cc-821f-ff9d821701d3-kube-api-access-btfsj\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941893 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-template-login\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941941 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.941960 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:42 crc kubenswrapper[4712]: I0131 05:43:42.990687 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.033570 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.043012 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.043053 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-template-error\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 
05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.043077 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.043099 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.043118 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.044161 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.044218 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btfsj\" (UniqueName: \"kubernetes.io/projected/444823c1-da17-40cc-821f-ff9d821701d3-kube-api-access-btfsj\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.044251 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-template-login\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.044293 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.044319 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" 
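
The run of reconciler_common.go and operation_generator.go entries above traces one pass of the kubelet volume manager for the new oauth-openshift pod: VerifyControllerAttachedVolume confirms each declared volume, MountVolume starts the operation, and MountVolume.SetUp reports success. A heavily simplified sketch of that desired-state versus actual-state loop; every type and function below is a hypothetical stand-in, not the kubelet's real API:

    package main

    import "fmt"

    // volume is a hypothetical stand-in for a pod volume spec.
    type volume struct{ name, pod string }

    // reconcile mounts whatever is desired but not yet in the actual state,
    // mirroring the VerifyControllerAttachedVolume -> MountVolume ->
    // MountVolume.SetUp progression in the log above.
    func reconcile(desired, actual map[string]volume) {
        for key, v := range desired {
            if _, ok := actual[key]; ok {
                continue // already mounted
            }
            fmt.Printf("VerifyControllerAttachedVolume started for %q (pod %q)\n", v.name, v.pod)
            fmt.Printf("MountVolume started for volume %q\n", v.name)
            // ... the real operation executor runs the mount asynchronously ...
            actual[key] = v
            fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v.name)
        }
    }

    func main() {
        pod := "oauth-openshift-7fdcdd74b7-j2qbn"
        desired := map[string]volume{
            "audit-dir":                  {"audit-dir", pod},
            "v4-0-config-system-session": {"v4-0-config-system-session", pod},
        }
        reconcile(desired, map[string]volume{}) // pod is new: nothing mounted yet
    }
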
Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.044352 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/444823c1-da17-40cc-821f-ff9d821701d3-audit-dir\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.044375 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-session\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.044402 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-audit-policies\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.044426 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.045647 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.046045 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.046622 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.046679 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/444823c1-da17-40cc-821f-ff9d821701d3-audit-dir\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.046862 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/444823c1-da17-40cc-821f-ff9d821701d3-audit-policies\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.049334 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.049761 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-template-error\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.050576 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-session\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.050157 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.050904 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.052343 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-template-login\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.052732 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.058165 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: 
I0131 05:43:43.064825 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/444823c1-da17-40cc-821f-ff9d821701d3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.067637 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btfsj\" (UniqueName: \"kubernetes.io/projected/444823c1-da17-40cc-821f-ff9d821701d3-kube-api-access-btfsj\") pod \"oauth-openshift-7fdcdd74b7-j2qbn\" (UID: \"444823c1-da17-40cc-821f-ff9d821701d3\") " pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.156368 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.201225 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.246154 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.254655 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.367976 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.380243 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.654948 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.663109 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.671389 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.708670 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.740558 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.870433 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 31 05:43:43 crc kubenswrapper[4712]: I0131 05:43:43.985454 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.013456 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.220077 4712 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.227327 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.244445 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.299825 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.389115 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.428024 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.471325 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn"] Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.501684 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.508057 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.535731 4712 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.595338 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.677161 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.686489 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn"] Jan 31 05:43:44 crc kubenswrapper[4712]: I0131 05:43:44.831208 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.047605 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.079238 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" event={"ID":"444823c1-da17-40cc-821f-ff9d821701d3","Type":"ContainerStarted","Data":"d97fc26249b655fa4d153fd91016ba6800d15cd25e046c222d17ff7c766f5973"} Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.079280 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" event={"ID":"444823c1-da17-40cc-821f-ff9d821701d3","Type":"ContainerStarted","Data":"779d6e7af35de8346c6d6ad66a810f23a3ff43181a836fe81169a840833bd5dc"} Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.080115 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.108667 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" podStartSLOduration=63.108647998 podStartE2EDuration="1m3.108647998s" podCreationTimestamp="2026-01-31 05:42:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:43:45.104940403 +0000 UTC m=+291.198822244" watchObservedRunningTime="2026-01-31 05:43:45.108647998 +0000 UTC m=+291.202529839" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.109346 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.186218 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.192980 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.214711 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.300262 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.377099 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.404293 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7fdcdd74b7-j2qbn" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.421064 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.482531 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.542782 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.557864 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.584507 4712 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.688148 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.817935 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 31 05:43:45 crc kubenswrapper[4712]: I0131 05:43:45.897144 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.097353 4712 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.242747 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.298217 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.336735 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.400407 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.450755 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.590949 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.756094 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.905866 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 31 05:43:46 crc kubenswrapper[4712]: I0131 05:43:46.984333 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 31 05:43:47 crc kubenswrapper[4712]: I0131 05:43:47.089163 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 31 05:43:47 crc kubenswrapper[4712]: I0131 05:43:47.305467 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 31 05:43:47 crc kubenswrapper[4712]: I0131 05:43:47.317468 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 31 05:43:47 crc kubenswrapper[4712]: I0131 05:43:47.743392 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 31 05:43:47 crc kubenswrapper[4712]: I0131 05:43:47.848996 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 31 05:43:47 crc kubenswrapper[4712]: I0131 05:43:47.881976 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 31 05:43:47 crc kubenswrapper[4712]: I0131 05:43:47.963771 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.043920 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.044416 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.095803 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.096103 4712 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a" exitCode=137 Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.096202 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.096164 4712 scope.go:117] "RemoveContainer" containerID="c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.111823 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.111859 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.111904 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.111913 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.111938 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.111965 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.112023 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.112062 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.112290 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.112330 4712 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.112347 4712 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.112360 4712 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.115729 4712 scope.go:117] "RemoveContainer" containerID="c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a" Jan 31 05:43:48 crc kubenswrapper[4712]: E0131 05:43:48.116148 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a\": container with ID starting with c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a not found: ID does not exist" containerID="c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.116351 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a"} err="failed to get container status \"c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a\": rpc error: code = NotFound desc = could not find container \"c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a\": container with ID starting with c5ba9bc5e6451fb1d003c25ed64508fe61b9fb4e73dcb40581b728ec4c15ef7a not found: ID does not exist" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.122258 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.213724 4712 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.214334 4712 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.457066 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.514325 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.648541 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.665192 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 31 05:43:48 crc kubenswrapper[4712]: I0131 05:43:48.890288 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 31 05:43:49 crc kubenswrapper[4712]: I0131 05:43:49.498829 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 31 05:43:49 crc kubenswrapper[4712]: I0131 05:43:49.998304 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 31 05:43:54 crc kubenswrapper[4712]: I0131 05:43:54.339704 4712 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 31 05:44:02 crc kubenswrapper[4712]: I0131 05:44:02.173102 4712 generic.go:334] "Generic (PLEG): container finished" podID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerID="81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7" exitCode=0 Jan 31 05:44:02 crc kubenswrapper[4712]: I0131 05:44:02.173150 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" event={"ID":"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99","Type":"ContainerDied","Data":"81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7"} Jan 31 05:44:02 crc kubenswrapper[4712]: I0131 05:44:02.174071 4712 scope.go:117] "RemoveContainer" containerID="81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7" Jan 31 05:44:03 crc kubenswrapper[4712]: I0131 05:44:03.180289 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" event={"ID":"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99","Type":"ContainerStarted","Data":"9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4"} Jan 31 05:44:03 crc kubenswrapper[4712]: I0131 05:44:03.180933 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:44:03 crc kubenswrapper[4712]: I0131 05:44:03.183304 4712 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:44:09 crc kubenswrapper[4712]: I0131 05:44:09.722501 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mvft"] Jan 31 05:44:09 crc kubenswrapper[4712]: I0131 05:44:09.723306 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" podUID="549f7a4d-8fba-47e2-8b51-bb660fe413b4" containerName="controller-manager" containerID="cri-o://c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1" gracePeriod=30 Jan 31 05:44:09 crc kubenswrapper[4712]: I0131 05:44:09.814766 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"] Jan 31 05:44:09 crc kubenswrapper[4712]: I0131 05:44:09.814977 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" podUID="fbea73a1-2703-4775-9f42-8c8340e76f46" containerName="route-controller-manager" containerID="cri-o://c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae" gracePeriod=30 Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.063087 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.188245 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.201988 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-config\") pod \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.202042 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-proxy-ca-bundles\") pod \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.202101 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n5r8\" (UniqueName: \"kubernetes.io/projected/549f7a4d-8fba-47e2-8b51-bb660fe413b4-kube-api-access-8n5r8\") pod \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.202150 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-client-ca\") pod \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.202199 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/549f7a4d-8fba-47e2-8b51-bb660fe413b4-serving-cert\") pod \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\" (UID: \"549f7a4d-8fba-47e2-8b51-bb660fe413b4\") " Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.202786 
4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-client-ca" (OuterVolumeSpecName: "client-ca") pod "549f7a4d-8fba-47e2-8b51-bb660fe413b4" (UID: "549f7a4d-8fba-47e2-8b51-bb660fe413b4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.202861 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-config" (OuterVolumeSpecName: "config") pod "549f7a4d-8fba-47e2-8b51-bb660fe413b4" (UID: "549f7a4d-8fba-47e2-8b51-bb660fe413b4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.202855 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "549f7a4d-8fba-47e2-8b51-bb660fe413b4" (UID: "549f7a4d-8fba-47e2-8b51-bb660fe413b4"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.211223 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/549f7a4d-8fba-47e2-8b51-bb660fe413b4-kube-api-access-8n5r8" (OuterVolumeSpecName: "kube-api-access-8n5r8") pod "549f7a4d-8fba-47e2-8b51-bb660fe413b4" (UID: "549f7a4d-8fba-47e2-8b51-bb660fe413b4"). InnerVolumeSpecName "kube-api-access-8n5r8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.214591 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/549f7a4d-8fba-47e2-8b51-bb660fe413b4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "549f7a4d-8fba-47e2-8b51-bb660fe413b4" (UID: "549f7a4d-8fba-47e2-8b51-bb660fe413b4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.220388 4712 generic.go:334] "Generic (PLEG): container finished" podID="549f7a4d-8fba-47e2-8b51-bb660fe413b4" containerID="c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1" exitCode=0
Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.220470 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" event={"ID":"549f7a4d-8fba-47e2-8b51-bb660fe413b4","Type":"ContainerDied","Data":"c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1"}
Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.220499 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" event={"ID":"549f7a4d-8fba-47e2-8b51-bb660fe413b4","Type":"ContainerDied","Data":"93b49e79bafecf62a18560cc62b8043a1af55f0035e67b1b35c41ac90483358d"}
Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.220514 4712 scope.go:117] "RemoveContainer" containerID="c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1"
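
The "RemoveContainer" entry above and the NotFound errors just below it show an idempotent cleanup: the kubelet asks the runtime to delete a container CRI-O has already removed, gets rpc code = NotFound back, logs "DeleteContainer returned error", and carries on rather than failing the pod teardown. A sketch of that tolerate-NotFound check using gRPC status codes; removeContainer is a hypothetical stand-in for the CRI call, not its real signature:

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer stands in for the CRI RemoveContainer RPC; here it
    // always reports the container as missing, like the runtime does below.
    func removeContainer(id string) error {
        return status.Errorf(codes.NotFound, "could not find container %q: ID does not exist", id)
    }

    func main() {
        id := "c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1"
        if err := removeContainer(id); err != nil {
            if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
                // Already deleted by the runtime: log it and move on, as the
                // kubelet does with "DeleteContainer returned error".
                fmt.Println("container already gone; treating removal as done")
                return
            }
            panic(err)
        }
    }
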
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7mvft" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.223159 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.223195 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" event={"ID":"fbea73a1-2703-4775-9f42-8c8340e76f46","Type":"ContainerDied","Data":"c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae"} Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.223155 4712 generic.go:334] "Generic (PLEG): container finished" podID="fbea73a1-2703-4775-9f42-8c8340e76f46" containerID="c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae" exitCode=0 Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.223448 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6" event={"ID":"fbea73a1-2703-4775-9f42-8c8340e76f46","Type":"ContainerDied","Data":"04e652c086148c2da7505f775cf86074dce681ee123a160831af4e9fb73efb70"} Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.234316 4712 scope.go:117] "RemoveContainer" containerID="c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1" Jan 31 05:44:10 crc kubenswrapper[4712]: E0131 05:44:10.234697 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1\": container with ID starting with c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1 not found: ID does not exist" containerID="c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.234725 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1"} err="failed to get container status \"c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1\": rpc error: code = NotFound desc = could not find container \"c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1\": container with ID starting with c00d4a70dcb023cb2e7012651f8be60697c3aaaf4759fa6015f415da7c932cc1 not found: ID does not exist" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.234742 4712 scope.go:117] "RemoveContainer" containerID="c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.251334 4712 scope.go:117] "RemoveContainer" containerID="c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae" Jan 31 05:44:10 crc kubenswrapper[4712]: E0131 05:44:10.251725 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae\": container with ID starting with c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae not found: ID does not exist" containerID="c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.251748 4712 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae"} err="failed to get container status \"c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae\": rpc error: code = NotFound desc = could not find container \"c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae\": container with ID starting with c16ba8d92b3a53c1e2188c4d7c2c3e595354c9c23c1a864704f812294d603dae not found: ID does not exist" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.256887 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mvft"] Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.260354 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mvft"] Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.304063 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-config\") pod \"fbea73a1-2703-4775-9f42-8c8340e76f46\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.304328 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-client-ca\") pod \"fbea73a1-2703-4775-9f42-8c8340e76f46\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.304509 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rgl5\" (UniqueName: \"kubernetes.io/projected/fbea73a1-2703-4775-9f42-8c8340e76f46-kube-api-access-7rgl5\") pod \"fbea73a1-2703-4775-9f42-8c8340e76f46\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.304624 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbea73a1-2703-4775-9f42-8c8340e76f46-serving-cert\") pod \"fbea73a1-2703-4775-9f42-8c8340e76f46\" (UID: \"fbea73a1-2703-4775-9f42-8c8340e76f46\") " Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.305208 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n5r8\" (UniqueName: \"kubernetes.io/projected/549f7a4d-8fba-47e2-8b51-bb660fe413b4-kube-api-access-8n5r8\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.305229 4712 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-client-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.305238 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/549f7a4d-8fba-47e2-8b51-bb660fe413b4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.305247 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.305257 4712 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/549f7a4d-8fba-47e2-8b51-bb660fe413b4-proxy-ca-bundles\") on node \"crc\" 
DevicePath \"\"" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.305998 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-client-ca" (OuterVolumeSpecName: "client-ca") pod "fbea73a1-2703-4775-9f42-8c8340e76f46" (UID: "fbea73a1-2703-4775-9f42-8c8340e76f46"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.306119 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-config" (OuterVolumeSpecName: "config") pod "fbea73a1-2703-4775-9f42-8c8340e76f46" (UID: "fbea73a1-2703-4775-9f42-8c8340e76f46"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.308501 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbea73a1-2703-4775-9f42-8c8340e76f46-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fbea73a1-2703-4775-9f42-8c8340e76f46" (UID: "fbea73a1-2703-4775-9f42-8c8340e76f46"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.308646 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbea73a1-2703-4775-9f42-8c8340e76f46-kube-api-access-7rgl5" (OuterVolumeSpecName: "kube-api-access-7rgl5") pod "fbea73a1-2703-4775-9f42-8c8340e76f46" (UID: "fbea73a1-2703-4775-9f42-8c8340e76f46"). InnerVolumeSpecName "kube-api-access-7rgl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.406927 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.406964 4712 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbea73a1-2703-4775-9f42-8c8340e76f46-client-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.406976 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rgl5\" (UniqueName: \"kubernetes.io/projected/fbea73a1-2703-4775-9f42-8c8340e76f46-kube-api-access-7rgl5\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.406986 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbea73a1-2703-4775-9f42-8c8340e76f46-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.510073 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="549f7a4d-8fba-47e2-8b51-bb660fe413b4" path="/var/lib/kubelet/pods/549f7a4d-8fba-47e2-8b51-bb660fe413b4/volumes" Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.541633 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"] Jan 31 05:44:10 crc kubenswrapper[4712]: I0131 05:44:10.546016 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gn8v6"] Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.839783 4712 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr"] Jan 31 05:44:11 crc kubenswrapper[4712]: E0131 05:44:11.840210 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="549f7a4d-8fba-47e2-8b51-bb660fe413b4" containerName="controller-manager" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.840235 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="549f7a4d-8fba-47e2-8b51-bb660fe413b4" containerName="controller-manager" Jan 31 05:44:11 crc kubenswrapper[4712]: E0131 05:44:11.840264 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbea73a1-2703-4775-9f42-8c8340e76f46" containerName="route-controller-manager" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.840279 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbea73a1-2703-4775-9f42-8c8340e76f46" containerName="route-controller-manager" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.840492 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbea73a1-2703-4775-9f42-8c8340e76f46" containerName="route-controller-manager" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.840529 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="549f7a4d-8fba-47e2-8b51-bb660fe413b4" containerName="controller-manager" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.841309 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.847430 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.848124 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.848149 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.850530 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.850548 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.850793 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.860491 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.861003 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2"] Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.862669 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr"] Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.862940 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.863757 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2"] Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.867490 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.867544 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.867670 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.867712 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.869250 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.871558 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.928334 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b299a7d4-9d51-4566-aed4-21afda358c1b-serving-cert\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.928447 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjzn6\" (UniqueName: \"kubernetes.io/projected/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-kube-api-access-wjzn6\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.928735 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-serving-cert\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.929046 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-client-ca\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.929139 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-config\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: 
\"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.929377 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-config\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.929914 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dqdx\" (UniqueName: \"kubernetes.io/projected/b299a7d4-9d51-4566-aed4-21afda358c1b-kube-api-access-6dqdx\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.930346 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-client-ca\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:11 crc kubenswrapper[4712]: I0131 05:44:11.930559 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-proxy-ca-bundles\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.031839 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b299a7d4-9d51-4566-aed4-21afda358c1b-serving-cert\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.031881 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjzn6\" (UniqueName: \"kubernetes.io/projected/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-kube-api-access-wjzn6\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.031907 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-serving-cert\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.031936 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-client-ca\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " 
pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.031959 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-config\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.031977 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-config\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.032008 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dqdx\" (UniqueName: \"kubernetes.io/projected/b299a7d4-9d51-4566-aed4-21afda358c1b-kube-api-access-6dqdx\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.032030 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-client-ca\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.032051 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-proxy-ca-bundles\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.033490 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-proxy-ca-bundles\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.034406 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-config\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.035473 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-config\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.039933 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-client-ca\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.041411 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-client-ca\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.042480 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b299a7d4-9d51-4566-aed4-21afda358c1b-serving-cert\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.044016 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-serving-cert\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.050750 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjzn6\" (UniqueName: \"kubernetes.io/projected/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-kube-api-access-wjzn6\") pod \"controller-manager-7b5d9c9d88-l86qr\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.053789 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dqdx\" (UniqueName: \"kubernetes.io/projected/b299a7d4-9d51-4566-aed4-21afda358c1b-kube-api-access-6dqdx\") pod \"route-controller-manager-65b6fc7b95-sbqs2\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.165721 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.210944 4712 util.go:30] "No sandbox for pod can be found. 
Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.436380 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr"]
Jan 31 05:44:12 crc kubenswrapper[4712]: W0131 05:44:12.453883 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4d908bd_8fa6_46f6_83c1_6bdc5767e526.slice/crio-865c00d3c2317929ff7bf25b697013f07a1fefad7c60a981f1e25fc91912e448 WatchSource:0}: Error finding container 865c00d3c2317929ff7bf25b697013f07a1fefad7c60a981f1e25fc91912e448: Status 404 returned error can't find the container with id 865c00d3c2317929ff7bf25b697013f07a1fefad7c60a981f1e25fc91912e448
Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.495063 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2"]
Jan 31 05:44:12 crc kubenswrapper[4712]: W0131 05:44:12.503304 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb299a7d4_9d51_4566_aed4_21afda358c1b.slice/crio-f7b9483d76b0a5de103c8be1a55dc74bf6957e6d07002614031fbfa4f75df6d9 WatchSource:0}: Error finding container f7b9483d76b0a5de103c8be1a55dc74bf6957e6d07002614031fbfa4f75df6d9: Status 404 returned error can't find the container with id f7b9483d76b0a5de103c8be1a55dc74bf6957e6d07002614031fbfa4f75df6d9
Jan 31 05:44:12 crc kubenswrapper[4712]: I0131 05:44:12.512849 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbea73a1-2703-4775-9f42-8c8340e76f46" path="/var/lib/kubelet/pods/fbea73a1-2703-4775-9f42-8c8340e76f46/volumes"
Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.245900 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" event={"ID":"b299a7d4-9d51-4566-aed4-21afda358c1b","Type":"ContainerStarted","Data":"ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6"}
Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.246403 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2"
Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.246428 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" event={"ID":"b299a7d4-9d51-4566-aed4-21afda358c1b","Type":"ContainerStarted","Data":"f7b9483d76b0a5de103c8be1a55dc74bf6957e6d07002614031fbfa4f75df6d9"}
Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.247105 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" event={"ID":"f4d908bd-8fa6-46f6-83c1-6bdc5767e526","Type":"ContainerStarted","Data":"25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a"}
Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.247146 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" event={"ID":"f4d908bd-8fa6-46f6-83c1-6bdc5767e526","Type":"ContainerStarted","Data":"865c00d3c2317929ff7bf25b697013f07a1fefad7c60a981f1e25fc91912e448"}
Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.247713 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr"
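
The two W-level "Failed to process watch event ... Status 404" lines above are another transient race: cAdvisor sees the new crio-* cgroup appear before CRI-O has registered the container, so the first lookup fails; the ContainerStarted events that follow show it resolved on its own. A common way to tolerate this is a short bounded retry before treating absence as an error. A minimal sketch under that assumption (the lookup function is a stand-in, not cAdvisor's API):

    // Retry a container lookup briefly before reporting it missing.
    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    var errNotFound = errors.New("can't find the container with id")

    func lookupWithRetry(find func(id string) error, id string) error {
        for attempt := 0; attempt < 5; attempt++ {
            err := find(id)
            if err == nil || !errors.Is(err, errNotFound) {
                return err
            }
            time.Sleep(100 * time.Millisecond << attempt) // simple backoff
        }
        return fmt.Errorf("container %s still absent after retries", id)
    }

    func main() {
        created := time.Now().Add(250 * time.Millisecond) // registration lands "late"
        err := lookupWithRetry(func(id string) error {
            if time.Now().Before(created) {
                return fmt.Errorf("status 404: %w", errNotFound)
            }
            return nil
        }, "865c00d3c231")
        fmt.Println("lookup result:", err) // <nil> once the container is registered
    }
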
status="" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.254637 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.267864 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" podStartSLOduration=3.267845266 podStartE2EDuration="3.267845266s" podCreationTimestamp="2026-01-31 05:44:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:44:13.26140578 +0000 UTC m=+319.355287621" watchObservedRunningTime="2026-01-31 05:44:13.267845266 +0000 UTC m=+319.361727107" Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.280029 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" podStartSLOduration=3.280011928 podStartE2EDuration="3.280011928s" podCreationTimestamp="2026-01-31 05:44:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:44:13.276613761 +0000 UTC m=+319.370495632" watchObservedRunningTime="2026-01-31 05:44:13.280011928 +0000 UTC m=+319.373893769" Jan 31 05:44:13 crc kubenswrapper[4712]: I0131 05:44:13.363654 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:44:29 crc kubenswrapper[4712]: I0131 05:44:29.726510 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr"] Jan 31 05:44:29 crc kubenswrapper[4712]: I0131 05:44:29.727514 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" podUID="f4d908bd-8fa6-46f6-83c1-6bdc5767e526" containerName="controller-manager" containerID="cri-o://25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a" gracePeriod=30 Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.182148 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.276617 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-client-ca\") pod \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.276688 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-config\") pod \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.276750 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-serving-cert\") pod \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.276778 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-proxy-ca-bundles\") pod \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.276877 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjzn6\" (UniqueName: \"kubernetes.io/projected/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-kube-api-access-wjzn6\") pod \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\" (UID: \"f4d908bd-8fa6-46f6-83c1-6bdc5767e526\") " Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.277663 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-client-ca" (OuterVolumeSpecName: "client-ca") pod "f4d908bd-8fa6-46f6-83c1-6bdc5767e526" (UID: "f4d908bd-8fa6-46f6-83c1-6bdc5767e526"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.277678 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-config" (OuterVolumeSpecName: "config") pod "f4d908bd-8fa6-46f6-83c1-6bdc5767e526" (UID: "f4d908bd-8fa6-46f6-83c1-6bdc5767e526"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.277710 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f4d908bd-8fa6-46f6-83c1-6bdc5767e526" (UID: "f4d908bd-8fa6-46f6-83c1-6bdc5767e526"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.289403 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f4d908bd-8fa6-46f6-83c1-6bdc5767e526" (UID: "f4d908bd-8fa6-46f6-83c1-6bdc5767e526"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.291355 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-kube-api-access-wjzn6" (OuterVolumeSpecName: "kube-api-access-wjzn6") pod "f4d908bd-8fa6-46f6-83c1-6bdc5767e526" (UID: "f4d908bd-8fa6-46f6-83c1-6bdc5767e526"). InnerVolumeSpecName "kube-api-access-wjzn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.337727 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4d908bd-8fa6-46f6-83c1-6bdc5767e526" containerID="25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a" exitCode=0 Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.337802 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.337794 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" event={"ID":"f4d908bd-8fa6-46f6-83c1-6bdc5767e526","Type":"ContainerDied","Data":"25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a"} Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.337876 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr" event={"ID":"f4d908bd-8fa6-46f6-83c1-6bdc5767e526","Type":"ContainerDied","Data":"865c00d3c2317929ff7bf25b697013f07a1fefad7c60a981f1e25fc91912e448"} Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.337903 4712 scope.go:117] "RemoveContainer" containerID="25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.355140 4712 scope.go:117] "RemoveContainer" containerID="25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a" Jan 31 05:44:30 crc kubenswrapper[4712]: E0131 05:44:30.355644 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a\": container with ID starting with 25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a not found: ID does not exist" containerID="25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.355676 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a"} err="failed to get container status \"25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a\": rpc error: code = NotFound desc = could not find container \"25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a\": container with ID starting with 25014c1d199886493958c335b3b29fd8d1b560055c38d04ca5a10648f8f4537a not found: ID does not exist" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.366264 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr"] Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.368687 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7b5d9c9d88-l86qr"] Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.378754 4712 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-wjzn6\" (UniqueName: \"kubernetes.io/projected/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-kube-api-access-wjzn6\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.378778 4712 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-client-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.378788 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.378797 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.378805 4712 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f4d908bd-8fa6-46f6-83c1-6bdc5767e526-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.509736 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4d908bd-8fa6-46f6-83c1-6bdc5767e526" path="/var/lib/kubelet/pods/f4d908bd-8fa6-46f6-83c1-6bdc5767e526/volumes" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.849028 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw"] Jan 31 05:44:30 crc kubenswrapper[4712]: E0131 05:44:30.849637 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4d908bd-8fa6-46f6-83c1-6bdc5767e526" containerName="controller-manager" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.849651 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4d908bd-8fa6-46f6-83c1-6bdc5767e526" containerName="controller-manager" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.849751 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4d908bd-8fa6-46f6-83c1-6bdc5767e526" containerName="controller-manager" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.850270 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.853670 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.854364 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.854910 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.855692 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.855741 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.857029 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.862241 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.863473 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw"] Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.983612 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a6eae574-6e6e-423d-9b1b-cd822a2ba856-proxy-ca-bundles\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.983677 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6eae574-6e6e-423d-9b1b-cd822a2ba856-config\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.983747 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6eae574-6e6e-423d-9b1b-cd822a2ba856-client-ca\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.983768 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6eae574-6e6e-423d-9b1b-cd822a2ba856-serving-cert\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:30 crc kubenswrapper[4712]: I0131 05:44:30.983814 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zx8dg\" (UniqueName: 
\"kubernetes.io/projected/a6eae574-6e6e-423d-9b1b-cd822a2ba856-kube-api-access-zx8dg\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.084803 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a6eae574-6e6e-423d-9b1b-cd822a2ba856-proxy-ca-bundles\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.084858 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6eae574-6e6e-423d-9b1b-cd822a2ba856-config\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.084890 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6eae574-6e6e-423d-9b1b-cd822a2ba856-client-ca\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.084909 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6eae574-6e6e-423d-9b1b-cd822a2ba856-serving-cert\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.084939 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zx8dg\" (UniqueName: \"kubernetes.io/projected/a6eae574-6e6e-423d-9b1b-cd822a2ba856-kube-api-access-zx8dg\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.086249 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6eae574-6e6e-423d-9b1b-cd822a2ba856-client-ca\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.086464 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a6eae574-6e6e-423d-9b1b-cd822a2ba856-proxy-ca-bundles\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.086546 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6eae574-6e6e-423d-9b1b-cd822a2ba856-config\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " 
pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.089883 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6eae574-6e6e-423d-9b1b-cd822a2ba856-serving-cert\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.102810 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zx8dg\" (UniqueName: \"kubernetes.io/projected/a6eae574-6e6e-423d-9b1b-cd822a2ba856-kube-api-access-zx8dg\") pod \"controller-manager-77f8b7cd94-bw2nw\" (UID: \"a6eae574-6e6e-423d-9b1b-cd822a2ba856\") " pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.165496 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:31 crc kubenswrapper[4712]: I0131 05:44:31.633171 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw"] Jan 31 05:44:32 crc kubenswrapper[4712]: I0131 05:44:32.350145 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" event={"ID":"a6eae574-6e6e-423d-9b1b-cd822a2ba856","Type":"ContainerStarted","Data":"8f70f3237c2cc1def33af2087b9cfc008ed924bbe6c1b8bd42230ff1fb07564f"} Jan 31 05:44:32 crc kubenswrapper[4712]: I0131 05:44:32.350524 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" event={"ID":"a6eae574-6e6e-423d-9b1b-cd822a2ba856","Type":"ContainerStarted","Data":"4838a336397e7be3e9dbe374474889fb65a2305bf3a4e496d1f655eb65b9454a"} Jan 31 05:44:32 crc kubenswrapper[4712]: I0131 05:44:32.350541 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:32 crc kubenswrapper[4712]: I0131 05:44:32.354744 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" Jan 31 05:44:32 crc kubenswrapper[4712]: I0131 05:44:32.368017 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-77f8b7cd94-bw2nw" podStartSLOduration=3.367994238 podStartE2EDuration="3.367994238s" podCreationTimestamp="2026-01-31 05:44:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:44:32.365385261 +0000 UTC m=+338.459267112" watchObservedRunningTime="2026-01-31 05:44:32.367994238 +0000 UTC m=+338.461876079" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.157562 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh"] Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.158960 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.162288 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.163776 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.177650 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh"] Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.246025 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-secret-volume\") pod \"collect-profiles-29497305-pfvsh\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.246091 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxgc4\" (UniqueName: \"kubernetes.io/projected/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-kube-api-access-vxgc4\") pod \"collect-profiles-29497305-pfvsh\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.246132 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-config-volume\") pod \"collect-profiles-29497305-pfvsh\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.347004 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxgc4\" (UniqueName: \"kubernetes.io/projected/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-kube-api-access-vxgc4\") pod \"collect-profiles-29497305-pfvsh\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.347085 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-config-volume\") pod \"collect-profiles-29497305-pfvsh\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.347138 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-secret-volume\") pod \"collect-profiles-29497305-pfvsh\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.348142 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-config-volume\") pod 
\"collect-profiles-29497305-pfvsh\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.357434 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-secret-volume\") pod \"collect-profiles-29497305-pfvsh\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.363934 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxgc4\" (UniqueName: \"kubernetes.io/projected/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-kube-api-access-vxgc4\") pod \"collect-profiles-29497305-pfvsh\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.491008 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:00 crc kubenswrapper[4712]: I0131 05:45:00.896078 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh"] Jan 31 05:45:01 crc kubenswrapper[4712]: I0131 05:45:01.518261 4712 generic.go:334] "Generic (PLEG): container finished" podID="b31acf79-3f94-4e6f-ae2e-3f064f8cc35e" containerID="d449ac7e9b8973adfaaddf1833b128f805501c3cf9e3e9a2b6536eacf8bc5656" exitCode=0 Jan 31 05:45:01 crc kubenswrapper[4712]: I0131 05:45:01.518584 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" event={"ID":"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e","Type":"ContainerDied","Data":"d449ac7e9b8973adfaaddf1833b128f805501c3cf9e3e9a2b6536eacf8bc5656"} Jan 31 05:45:01 crc kubenswrapper[4712]: I0131 05:45:01.518615 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" event={"ID":"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e","Type":"ContainerStarted","Data":"6dad40b0f5a96b91aa7cb6af5876d41cb1ea9bb6f291278e056b69509911b7e2"} Jan 31 05:45:02 crc kubenswrapper[4712]: I0131 05:45:02.896044 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:02 crc kubenswrapper[4712]: I0131 05:45:02.979419 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-secret-volume\") pod \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " Jan 31 05:45:02 crc kubenswrapper[4712]: I0131 05:45:02.979473 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-config-volume\") pod \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " Jan 31 05:45:02 crc kubenswrapper[4712]: I0131 05:45:02.979641 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxgc4\" (UniqueName: \"kubernetes.io/projected/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-kube-api-access-vxgc4\") pod \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\" (UID: \"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e\") " Jan 31 05:45:02 crc kubenswrapper[4712]: I0131 05:45:02.980432 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-config-volume" (OuterVolumeSpecName: "config-volume") pod "b31acf79-3f94-4e6f-ae2e-3f064f8cc35e" (UID: "b31acf79-3f94-4e6f-ae2e-3f064f8cc35e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:45:02 crc kubenswrapper[4712]: I0131 05:45:02.985296 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b31acf79-3f94-4e6f-ae2e-3f064f8cc35e" (UID: "b31acf79-3f94-4e6f-ae2e-3f064f8cc35e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:45:02 crc kubenswrapper[4712]: I0131 05:45:02.985570 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-kube-api-access-vxgc4" (OuterVolumeSpecName: "kube-api-access-vxgc4") pod "b31acf79-3f94-4e6f-ae2e-3f064f8cc35e" (UID: "b31acf79-3f94-4e6f-ae2e-3f064f8cc35e"). InnerVolumeSpecName "kube-api-access-vxgc4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:03 crc kubenswrapper[4712]: I0131 05:45:03.080914 4712 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:03 crc kubenswrapper[4712]: I0131 05:45:03.080954 4712 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-config-volume\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:03 crc kubenswrapper[4712]: I0131 05:45:03.080966 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxgc4\" (UniqueName: \"kubernetes.io/projected/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e-kube-api-access-vxgc4\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:03 crc kubenswrapper[4712]: I0131 05:45:03.531165 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" event={"ID":"b31acf79-3f94-4e6f-ae2e-3f064f8cc35e","Type":"ContainerDied","Data":"6dad40b0f5a96b91aa7cb6af5876d41cb1ea9bb6f291278e056b69509911b7e2"} Jan 31 05:45:03 crc kubenswrapper[4712]: I0131 05:45:03.531617 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6dad40b0f5a96b91aa7cb6af5876d41cb1ea9bb6f291278e056b69509911b7e2" Jan 31 05:45:03 crc kubenswrapper[4712]: I0131 05:45:03.531251 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.712934 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-brd4s"] Jan 31 05:45:04 crc kubenswrapper[4712]: E0131 05:45:04.713257 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b31acf79-3f94-4e6f-ae2e-3f064f8cc35e" containerName="collect-profiles" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.713295 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b31acf79-3f94-4e6f-ae2e-3f064f8cc35e" containerName="collect-profiles" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.713446 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b31acf79-3f94-4e6f-ae2e-3f064f8cc35e" containerName="collect-profiles" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.714052 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.753738 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-brd4s"] Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.803314 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-installation-pull-secrets\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.803392 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-registry-tls\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.803416 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-bound-sa-token\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.803440 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-trusted-ca\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.803465 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-registry-certificates\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.803644 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-ca-trust-extracted\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.803704 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slck5\" (UniqueName: \"kubernetes.io/projected/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-kube-api-access-slck5\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.803783 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.829097 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.905571 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-installation-pull-secrets\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.905671 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-registry-tls\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.905697 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-bound-sa-token\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.905732 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-trusted-ca\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.905760 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-registry-certificates\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.907060 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-trusted-ca\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.907276 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-ca-trust-extracted\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.907324 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slck5\" (UniqueName: \"kubernetes.io/projected/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-kube-api-access-slck5\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.908508 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-registry-certificates\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.908613 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-ca-trust-extracted\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.911198 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-installation-pull-secrets\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.915813 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-registry-tls\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.923977 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-bound-sa-token\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:04 crc kubenswrapper[4712]: I0131 05:45:04.924582 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slck5\" (UniqueName: \"kubernetes.io/projected/a00b3879-8c88-4f0e-9e04-e5cd5d37fffd-kube-api-access-slck5\") pod \"image-registry-66df7c8f76-brd4s\" (UID: \"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd\") " pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:05 crc kubenswrapper[4712]: I0131 05:45:05.054983 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:05 crc kubenswrapper[4712]: I0131 05:45:05.447944 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-brd4s"] Jan 31 05:45:05 crc kubenswrapper[4712]: W0131 05:45:05.452320 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda00b3879_8c88_4f0e_9e04_e5cd5d37fffd.slice/crio-afdb88729f3305aebba2e219f7d275641de19015305d1a3cee5abb1a3327ee61 WatchSource:0}: Error finding container afdb88729f3305aebba2e219f7d275641de19015305d1a3cee5abb1a3327ee61: Status 404 returned error can't find the container with id afdb88729f3305aebba2e219f7d275641de19015305d1a3cee5abb1a3327ee61 Jan 31 05:45:05 crc kubenswrapper[4712]: I0131 05:45:05.543768 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" event={"ID":"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd","Type":"ContainerStarted","Data":"afdb88729f3305aebba2e219f7d275641de19015305d1a3cee5abb1a3327ee61"} Jan 31 05:45:06 crc kubenswrapper[4712]: I0131 05:45:06.549459 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" event={"ID":"a00b3879-8c88-4f0e-9e04-e5cd5d37fffd","Type":"ContainerStarted","Data":"78450302ed22d08d4a9fd563f656be715b22cf2c9f15b5dbec64aa7839183718"} Jan 31 05:45:06 crc kubenswrapper[4712]: I0131 05:45:06.549601 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:06 crc kubenswrapper[4712]: I0131 05:45:06.569876 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" podStartSLOduration=2.56985622 podStartE2EDuration="2.56985622s" podCreationTimestamp="2026-01-31 05:45:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:45:06.569201223 +0000 UTC m=+372.663083084" watchObservedRunningTime="2026-01-31 05:45:06.56985622 +0000 UTC m=+372.663738071" Jan 31 05:45:09 crc kubenswrapper[4712]: I0131 05:45:09.707507 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2"] Jan 31 05:45:09 crc kubenswrapper[4712]: I0131 05:45:09.709119 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" podUID="b299a7d4-9d51-4566-aed4-21afda358c1b" containerName="route-controller-manager" containerID="cri-o://ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6" gracePeriod=30 Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.099381 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.177951 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-config\") pod \"b299a7d4-9d51-4566-aed4-21afda358c1b\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.178008 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-client-ca\") pod \"b299a7d4-9d51-4566-aed4-21afda358c1b\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.178117 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b299a7d4-9d51-4566-aed4-21afda358c1b-serving-cert\") pod \"b299a7d4-9d51-4566-aed4-21afda358c1b\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.178140 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dqdx\" (UniqueName: \"kubernetes.io/projected/b299a7d4-9d51-4566-aed4-21afda358c1b-kube-api-access-6dqdx\") pod \"b299a7d4-9d51-4566-aed4-21afda358c1b\" (UID: \"b299a7d4-9d51-4566-aed4-21afda358c1b\") " Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.178775 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-config" (OuterVolumeSpecName: "config") pod "b299a7d4-9d51-4566-aed4-21afda358c1b" (UID: "b299a7d4-9d51-4566-aed4-21afda358c1b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.178817 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-client-ca" (OuterVolumeSpecName: "client-ca") pod "b299a7d4-9d51-4566-aed4-21afda358c1b" (UID: "b299a7d4-9d51-4566-aed4-21afda358c1b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.182782 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b299a7d4-9d51-4566-aed4-21afda358c1b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b299a7d4-9d51-4566-aed4-21afda358c1b" (UID: "b299a7d4-9d51-4566-aed4-21afda358c1b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.195866 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b299a7d4-9d51-4566-aed4-21afda358c1b-kube-api-access-6dqdx" (OuterVolumeSpecName: "kube-api-access-6dqdx") pod "b299a7d4-9d51-4566-aed4-21afda358c1b" (UID: "b299a7d4-9d51-4566-aed4-21afda358c1b"). InnerVolumeSpecName "kube-api-access-6dqdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.279245 4712 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b299a7d4-9d51-4566-aed4-21afda358c1b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.279536 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dqdx\" (UniqueName: \"kubernetes.io/projected/b299a7d4-9d51-4566-aed4-21afda358c1b-kube-api-access-6dqdx\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.279552 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.279562 4712 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b299a7d4-9d51-4566-aed4-21afda358c1b-client-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.572853 4712 generic.go:334] "Generic (PLEG): container finished" podID="b299a7d4-9d51-4566-aed4-21afda358c1b" containerID="ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6" exitCode=0 Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.572917 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" event={"ID":"b299a7d4-9d51-4566-aed4-21afda358c1b","Type":"ContainerDied","Data":"ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6"} Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.572936 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.572946 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2" event={"ID":"b299a7d4-9d51-4566-aed4-21afda358c1b","Type":"ContainerDied","Data":"f7b9483d76b0a5de103c8be1a55dc74bf6957e6d07002614031fbfa4f75df6d9"} Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.572984 4712 scope.go:117] "RemoveContainer" containerID="ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.589934 4712 scope.go:117] "RemoveContainer" containerID="ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6" Jan 31 05:45:10 crc kubenswrapper[4712]: E0131 05:45:10.590351 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6\": container with ID starting with ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6 not found: ID does not exist" containerID="ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.590397 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6"} err="failed to get container status \"ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6\": rpc error: code = NotFound desc = could not find container \"ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6\": container with ID starting with ff459054ce3f007376fd60b243fec80831102bb9fa627d166d632fb35a492da6 not found: ID does not exist" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.591983 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2"] Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.595089 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65b6fc7b95-sbqs2"] Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.873226 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb"] Jan 31 05:45:10 crc kubenswrapper[4712]: E0131 05:45:10.873455 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b299a7d4-9d51-4566-aed4-21afda358c1b" containerName="route-controller-manager" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.873468 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b299a7d4-9d51-4566-aed4-21afda358c1b" containerName="route-controller-manager" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.873563 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b299a7d4-9d51-4566-aed4-21afda358c1b" containerName="route-controller-manager" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.873900 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.875543 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.876216 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.876572 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.876730 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.876838 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.876941 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.889209 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb"] Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.989225 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bl6z\" (UniqueName: \"kubernetes.io/projected/dc079c6f-4431-401a-ae0f-88753b900591-kube-api-access-9bl6z\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.989287 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc079c6f-4431-401a-ae0f-88753b900591-config\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.989424 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc079c6f-4431-401a-ae0f-88753b900591-client-ca\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:10 crc kubenswrapper[4712]: I0131 05:45:10.989525 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc079c6f-4431-401a-ae0f-88753b900591-serving-cert\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.091110 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bl6z\" (UniqueName: \"kubernetes.io/projected/dc079c6f-4431-401a-ae0f-88753b900591-kube-api-access-9bl6z\") pod 
\"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.091157 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc079c6f-4431-401a-ae0f-88753b900591-config\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.091212 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc079c6f-4431-401a-ae0f-88753b900591-client-ca\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.091242 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc079c6f-4431-401a-ae0f-88753b900591-serving-cert\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.092339 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc079c6f-4431-401a-ae0f-88753b900591-client-ca\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.092575 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc079c6f-4431-401a-ae0f-88753b900591-config\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.100020 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc079c6f-4431-401a-ae0f-88753b900591-serving-cert\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.105875 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bl6z\" (UniqueName: \"kubernetes.io/projected/dc079c6f-4431-401a-ae0f-88753b900591-kube-api-access-9bl6z\") pod \"route-controller-manager-7cd8bd8d9b-mc2xb\" (UID: \"dc079c6f-4431-401a-ae0f-88753b900591\") " pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.191499 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:11 crc kubenswrapper[4712]: I0131 05:45:11.596961 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb"] Jan 31 05:45:12 crc kubenswrapper[4712]: I0131 05:45:12.498679 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:45:12 crc kubenswrapper[4712]: I0131 05:45:12.498759 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:45:12 crc kubenswrapper[4712]: I0131 05:45:12.518604 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b299a7d4-9d51-4566-aed4-21afda358c1b" path="/var/lib/kubelet/pods/b299a7d4-9d51-4566-aed4-21afda358c1b/volumes" Jan 31 05:45:12 crc kubenswrapper[4712]: I0131 05:45:12.590671 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" event={"ID":"dc079c6f-4431-401a-ae0f-88753b900591","Type":"ContainerStarted","Data":"b7cceca6ef54bfdac97e1459c541f80740c8e89c63f025f7fd3f436030063269"} Jan 31 05:45:12 crc kubenswrapper[4712]: I0131 05:45:12.590729 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" event={"ID":"dc079c6f-4431-401a-ae0f-88753b900591","Type":"ContainerStarted","Data":"d1dfbc9579a3266ce1bc15b29b61aca988938ffc62883d35d688efd54c5911f0"} Jan 31 05:45:12 crc kubenswrapper[4712]: I0131 05:45:12.591138 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:12 crc kubenswrapper[4712]: I0131 05:45:12.600678 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" Jan 31 05:45:12 crc kubenswrapper[4712]: I0131 05:45:12.613822 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7cd8bd8d9b-mc2xb" podStartSLOduration=3.613791823 podStartE2EDuration="3.613791823s" podCreationTimestamp="2026-01-31 05:45:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:45:12.606885216 +0000 UTC m=+378.700767067" watchObservedRunningTime="2026-01-31 05:45:12.613791823 +0000 UTC m=+378.707673694" Jan 31 05:45:25 crc kubenswrapper[4712]: I0131 05:45:25.061616 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-brd4s" Jan 31 05:45:25 crc kubenswrapper[4712]: I0131 05:45:25.139551 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nlhkt"] Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.720875 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-njk9h"] Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.721861 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-njk9h" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerName="registry-server" containerID="cri-o://19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff" gracePeriod=30 Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.733544 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dpr8n"] Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.733795 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dpr8n" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerName="registry-server" containerID="cri-o://47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca" gracePeriod=30 Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.746910 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zkv2d"] Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.747133 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerName="marketplace-operator" containerID="cri-o://9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4" gracePeriod=30 Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.755726 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jp69c"] Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.756427 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.775669 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpm9t"] Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.775957 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cpm9t" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" containerName="registry-server" containerID="cri-o://5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208" gracePeriod=30 Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.792759 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-djd7s"] Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.793031 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-djd7s" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerName="registry-server" containerID="cri-o://e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb" gracePeriod=30 Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.796212 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jp69c"] Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.927898 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d31cb490-ecb9-4f62-8633-a6239f98d3a2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jp69c\" (UID: \"d31cb490-ecb9-4f62-8633-a6239f98d3a2\") " pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.927980 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d31cb490-ecb9-4f62-8633-a6239f98d3a2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jp69c\" (UID: \"d31cb490-ecb9-4f62-8633-a6239f98d3a2\") " pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:41 crc kubenswrapper[4712]: I0131 05:45:41.928002 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdd5k\" (UniqueName: \"kubernetes.io/projected/d31cb490-ecb9-4f62-8633-a6239f98d3a2-kube-api-access-rdd5k\") pod \"marketplace-operator-79b997595-jp69c\" (UID: \"d31cb490-ecb9-4f62-8633-a6239f98d3a2\") " pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.028769 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d31cb490-ecb9-4f62-8633-a6239f98d3a2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jp69c\" (UID: \"d31cb490-ecb9-4f62-8633-a6239f98d3a2\") " pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.029120 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdd5k\" (UniqueName: \"kubernetes.io/projected/d31cb490-ecb9-4f62-8633-a6239f98d3a2-kube-api-access-rdd5k\") pod \"marketplace-operator-79b997595-jp69c\" (UID: \"d31cb490-ecb9-4f62-8633-a6239f98d3a2\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.029195 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d31cb490-ecb9-4f62-8633-a6239f98d3a2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jp69c\" (UID: \"d31cb490-ecb9-4f62-8633-a6239f98d3a2\") " pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.035278 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d31cb490-ecb9-4f62-8633-a6239f98d3a2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-jp69c\" (UID: \"d31cb490-ecb9-4f62-8633-a6239f98d3a2\") " pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.040283 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d31cb490-ecb9-4f62-8633-a6239f98d3a2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-jp69c\" (UID: \"d31cb490-ecb9-4f62-8633-a6239f98d3a2\") " pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.050255 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdd5k\" (UniqueName: \"kubernetes.io/projected/d31cb490-ecb9-4f62-8633-a6239f98d3a2-kube-api-access-rdd5k\") pod \"marketplace-operator-79b997595-jp69c\" (UID: \"d31cb490-ecb9-4f62-8633-a6239f98d3a2\") " pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.074214 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.217252 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-njk9h" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.229275 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.232651 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-utilities\") pod \"b4b6ff77-738a-480a-b29c-30a4a0d42182\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.232717 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-catalog-content\") pod \"b4b6ff77-738a-480a-b29c-30a4a0d42182\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.232754 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbb49\" (UniqueName: \"kubernetes.io/projected/1fd9f25f-4108-411d-a106-3d94f4afe2bf-kube-api-access-dbb49\") pod \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.232837 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-utilities\") pod \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.232870 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-catalog-content\") pod \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\" (UID: \"1fd9f25f-4108-411d-a106-3d94f4afe2bf\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.232897 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg7z7\" (UniqueName: \"kubernetes.io/projected/b4b6ff77-738a-480a-b29c-30a4a0d42182-kube-api-access-qg7z7\") pod \"b4b6ff77-738a-480a-b29c-30a4a0d42182\" (UID: \"b4b6ff77-738a-480a-b29c-30a4a0d42182\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.236435 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-utilities" (OuterVolumeSpecName: "utilities") pod "1fd9f25f-4108-411d-a106-3d94f4afe2bf" (UID: "1fd9f25f-4108-411d-a106-3d94f4afe2bf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.238413 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.239999 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-utilities" (OuterVolumeSpecName: "utilities") pod "b4b6ff77-738a-480a-b29c-30a4a0d42182" (UID: "b4b6ff77-738a-480a-b29c-30a4a0d42182"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.245400 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.247858 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd9f25f-4108-411d-a106-3d94f4afe2bf-kube-api-access-dbb49" (OuterVolumeSpecName: "kube-api-access-dbb49") pod "1fd9f25f-4108-411d-a106-3d94f4afe2bf" (UID: "1fd9f25f-4108-411d-a106-3d94f4afe2bf"). InnerVolumeSpecName "kube-api-access-dbb49". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.247903 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4b6ff77-738a-480a-b29c-30a4a0d42182-kube-api-access-qg7z7" (OuterVolumeSpecName: "kube-api-access-qg7z7") pod "b4b6ff77-738a-480a-b29c-30a4a0d42182" (UID: "b4b6ff77-738a-480a-b29c-30a4a0d42182"). InnerVolumeSpecName "kube-api-access-qg7z7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.272918 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.286691 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1fd9f25f-4108-411d-a106-3d94f4afe2bf" (UID: "1fd9f25f-4108-411d-a106-3d94f4afe2bf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.288539 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4b6ff77-738a-480a-b29c-30a4a0d42182" (UID: "b4b6ff77-738a-480a-b29c-30a4a0d42182"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334027 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-utilities\") pod \"86d1e607-e026-4540-a6b0-fab85244efd3\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334113 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-catalog-content\") pod \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334156 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-operator-metrics\") pod \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334201 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkl9v\" (UniqueName: \"kubernetes.io/projected/86d1e607-e026-4540-a6b0-fab85244efd3-kube-api-access-dkl9v\") pod \"86d1e607-e026-4540-a6b0-fab85244efd3\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334225 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-utilities\") pod \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334281 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz56g\" (UniqueName: \"kubernetes.io/projected/d110bc72-b196-442c-9ea7-f1460a0b2bf4-kube-api-access-bz56g\") pod \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\" (UID: \"d110bc72-b196-442c-9ea7-f1460a0b2bf4\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334301 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-trusted-ca\") pod \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334321 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmr87\" (UniqueName: \"kubernetes.io/projected/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-kube-api-access-fmr87\") pod \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\" (UID: \"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334359 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-catalog-content\") pod \"86d1e607-e026-4540-a6b0-fab85244efd3\" (UID: \"86d1e607-e026-4540-a6b0-fab85244efd3\") " Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334553 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbb49\" (UniqueName: 
\"kubernetes.io/projected/1fd9f25f-4108-411d-a106-3d94f4afe2bf-kube-api-access-dbb49\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334566 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334574 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fd9f25f-4108-411d-a106-3d94f4afe2bf-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334583 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg7z7\" (UniqueName: \"kubernetes.io/projected/b4b6ff77-738a-480a-b29c-30a4a0d42182-kube-api-access-qg7z7\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334592 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.334601 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4b6ff77-738a-480a-b29c-30a4a0d42182-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.339787 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" (UID: "e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.340497 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-utilities" (OuterVolumeSpecName: "utilities") pod "d110bc72-b196-442c-9ea7-f1460a0b2bf4" (UID: "d110bc72-b196-442c-9ea7-f1460a0b2bf4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.341821 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d110bc72-b196-442c-9ea7-f1460a0b2bf4-kube-api-access-bz56g" (OuterVolumeSpecName: "kube-api-access-bz56g") pod "d110bc72-b196-442c-9ea7-f1460a0b2bf4" (UID: "d110bc72-b196-442c-9ea7-f1460a0b2bf4"). InnerVolumeSpecName "kube-api-access-bz56g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.342060 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-kube-api-access-fmr87" (OuterVolumeSpecName: "kube-api-access-fmr87") pod "e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" (UID: "e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99"). InnerVolumeSpecName "kube-api-access-fmr87". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.342340 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86d1e607-e026-4540-a6b0-fab85244efd3-kube-api-access-dkl9v" (OuterVolumeSpecName: "kube-api-access-dkl9v") pod "86d1e607-e026-4540-a6b0-fab85244efd3" (UID: "86d1e607-e026-4540-a6b0-fab85244efd3"). InnerVolumeSpecName "kube-api-access-dkl9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.342576 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-utilities" (OuterVolumeSpecName: "utilities") pod "86d1e607-e026-4540-a6b0-fab85244efd3" (UID: "86d1e607-e026-4540-a6b0-fab85244efd3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.345056 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" (UID: "e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.360852 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86d1e607-e026-4540-a6b0-fab85244efd3" (UID: "86d1e607-e026-4540-a6b0-fab85244efd3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.436752 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.436791 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86d1e607-e026-4540-a6b0-fab85244efd3-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.436802 4712 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.436814 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkl9v\" (UniqueName: \"kubernetes.io/projected/86d1e607-e026-4540-a6b0-fab85244efd3-kube-api-access-dkl9v\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.436823 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.436835 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz56g\" (UniqueName: \"kubernetes.io/projected/d110bc72-b196-442c-9ea7-f1460a0b2bf4-kube-api-access-bz56g\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.436843 4712 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.436852 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmr87\" (UniqueName: \"kubernetes.io/projected/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99-kube-api-access-fmr87\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.460261 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d110bc72-b196-442c-9ea7-f1460a0b2bf4" (UID: "d110bc72-b196-442c-9ea7-f1460a0b2bf4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.497596 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.497676 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.540874 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d110bc72-b196-442c-9ea7-f1460a0b2bf4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.635705 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-jp69c"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.779252 4712 generic.go:334] "Generic (PLEG): container finished" podID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerID="47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca" exitCode=0 Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.779331 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dpr8n" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.779336 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpr8n" event={"ID":"b4b6ff77-738a-480a-b29c-30a4a0d42182","Type":"ContainerDied","Data":"47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.779417 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpr8n" event={"ID":"b4b6ff77-738a-480a-b29c-30a4a0d42182","Type":"ContainerDied","Data":"92715f8aef6b9af3aa96bfc0d5fa2b72ab536c01ed08c64f054bb7b5f3fc7fb9"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.779442 4712 scope.go:117] "RemoveContainer" containerID="47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.781554 4712 generic.go:334] "Generic (PLEG): container finished" podID="86d1e607-e026-4540-a6b0-fab85244efd3" containerID="5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208" exitCode=0 Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.781610 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpm9t" event={"ID":"86d1e607-e026-4540-a6b0-fab85244efd3","Type":"ContainerDied","Data":"5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.781638 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cpm9t" event={"ID":"86d1e607-e026-4540-a6b0-fab85244efd3","Type":"ContainerDied","Data":"dc854ee3e6dca8de7c2379e7af447e0e2d78bb671cab300c4e15e841063ce0af"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.781645 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cpm9t" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.783791 4712 generic.go:334] "Generic (PLEG): container finished" podID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerID="9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4" exitCode=0 Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.783869 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" event={"ID":"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99","Type":"ContainerDied","Data":"9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.783901 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" event={"ID":"e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99","Type":"ContainerDied","Data":"745a5b0bed7c89cb58e6aa5149cffacce321df7f13ac2b76036b176c8f17098d"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.783985 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-zkv2d" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.787132 4712 generic.go:334] "Generic (PLEG): container finished" podID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerID="19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff" exitCode=0 Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.787223 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-njk9h" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.787252 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-njk9h" event={"ID":"1fd9f25f-4108-411d-a106-3d94f4afe2bf","Type":"ContainerDied","Data":"19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.787280 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-njk9h" event={"ID":"1fd9f25f-4108-411d-a106-3d94f4afe2bf","Type":"ContainerDied","Data":"1fc7e7c03d1d5269d450b0702d103c61ecfc1c4f9cebfccbf7b280c310921154"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.790164 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" event={"ID":"d31cb490-ecb9-4f62-8633-a6239f98d3a2","Type":"ContainerStarted","Data":"0a9df4c4559743f85367d0c482fb2de730533da47acee479ba6d8cf7198dbcd6"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.790217 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" event={"ID":"d31cb490-ecb9-4f62-8633-a6239f98d3a2","Type":"ContainerStarted","Data":"50a496a191c0ccf30aa29fce53d2c469e4f63af6a703afa979c314b0790eead4"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.790741 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.792162 4712 generic.go:334] "Generic (PLEG): container finished" podID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerID="e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb" exitCode=0 Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.792223 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-djd7s" event={"ID":"d110bc72-b196-442c-9ea7-f1460a0b2bf4","Type":"ContainerDied","Data":"e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.792242 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djd7s" event={"ID":"d110bc72-b196-442c-9ea7-f1460a0b2bf4","Type":"ContainerDied","Data":"b03cb638853368c53d1b2ee77eeeb100717a65a4207e7dc35192f9e5285c2397"} Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.792357 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-djd7s" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.792982 4712 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-jp69c container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" start-of-body= Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.793020 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" podUID="d31cb490-ecb9-4f62-8633-a6239f98d3a2" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.800303 4712 scope.go:117] "RemoveContainer" containerID="9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.819414 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zkv2d"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.822351 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-zkv2d"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.825569 4712 scope.go:117] "RemoveContainer" containerID="3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.849128 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-jp69c" podStartSLOduration=1.8491066250000001 podStartE2EDuration="1.849106625s" podCreationTimestamp="2026-01-31 05:45:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:45:42.836716657 +0000 UTC m=+408.930598498" watchObservedRunningTime="2026-01-31 05:45:42.849106625 +0000 UTC m=+408.942988466" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.864461 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dpr8n"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.869890 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dpr8n"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.874046 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-njk9h"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.877289 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-njk9h"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.880693 4712 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpm9t"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.886917 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cpm9t"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.898497 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-djd7s"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.899745 4712 scope.go:117] "RemoveContainer" containerID="47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca" Jan 31 05:45:42 crc kubenswrapper[4712]: E0131 05:45:42.900243 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca\": container with ID starting with 47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca not found: ID does not exist" containerID="47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.900276 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca"} err="failed to get container status \"47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca\": rpc error: code = NotFound desc = could not find container \"47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca\": container with ID starting with 47c51a37a198ee5568cf3467f1c1e023aa068b2ab2ef6ae3c1d1698c110649ca not found: ID does not exist" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.900297 4712 scope.go:117] "RemoveContainer" containerID="9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340" Jan 31 05:45:42 crc kubenswrapper[4712]: E0131 05:45:42.901169 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340\": container with ID starting with 9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340 not found: ID does not exist" containerID="9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.901205 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340"} err="failed to get container status \"9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340\": rpc error: code = NotFound desc = could not find container \"9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340\": container with ID starting with 9f9f1ee10e738bc34173c45f7258b055e99c02d38e8083e61a3cd04a52858340 not found: ID does not exist" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.901219 4712 scope.go:117] "RemoveContainer" containerID="3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3" Jan 31 05:45:42 crc kubenswrapper[4712]: E0131 05:45:42.901461 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3\": container with ID starting with 3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3 not found: ID does not exist" 
containerID="3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.901481 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3"} err="failed to get container status \"3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3\": rpc error: code = NotFound desc = could not find container \"3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3\": container with ID starting with 3acaa12eaa727030b67e4b25cdfb649d4ef01010b536c43db57d5b864cfdeca3 not found: ID does not exist" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.901493 4712 scope.go:117] "RemoveContainer" containerID="5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.901775 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-djd7s"] Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.916649 4712 scope.go:117] "RemoveContainer" containerID="e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.928874 4712 scope.go:117] "RemoveContainer" containerID="f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.951609 4712 scope.go:117] "RemoveContainer" containerID="5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208" Jan 31 05:45:42 crc kubenswrapper[4712]: E0131 05:45:42.952113 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208\": container with ID starting with 5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208 not found: ID does not exist" containerID="5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.952165 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208"} err="failed to get container status \"5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208\": rpc error: code = NotFound desc = could not find container \"5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208\": container with ID starting with 5fc45c0149c7cab3a2b291b523c1ccf79f660d1d05a3131fdfb1df24236af208 not found: ID does not exist" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.952202 4712 scope.go:117] "RemoveContainer" containerID="e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1" Jan 31 05:45:42 crc kubenswrapper[4712]: E0131 05:45:42.952679 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1\": container with ID starting with e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1 not found: ID does not exist" containerID="e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.952719 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1"} err="failed to get container status 
\"e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1\": rpc error: code = NotFound desc = could not find container \"e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1\": container with ID starting with e497e27ee857f97c7c5ee59d0c5a94c38f3615399ceea206d620ab031de8d1c1 not found: ID does not exist" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.952748 4712 scope.go:117] "RemoveContainer" containerID="f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4" Jan 31 05:45:42 crc kubenswrapper[4712]: E0131 05:45:42.953981 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4\": container with ID starting with f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4 not found: ID does not exist" containerID="f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.954030 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4"} err="failed to get container status \"f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4\": rpc error: code = NotFound desc = could not find container \"f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4\": container with ID starting with f2c64bf93219deb1956cd5bbcca9741c151372b83b8d6c0e272f91a0552021b4 not found: ID does not exist" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.954047 4712 scope.go:117] "RemoveContainer" containerID="9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.965937 4712 scope.go:117] "RemoveContainer" containerID="81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.978053 4712 scope.go:117] "RemoveContainer" containerID="9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4" Jan 31 05:45:42 crc kubenswrapper[4712]: E0131 05:45:42.978533 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4\": container with ID starting with 9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4 not found: ID does not exist" containerID="9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.978564 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4"} err="failed to get container status \"9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4\": rpc error: code = NotFound desc = could not find container \"9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4\": container with ID starting with 9bba3b5342c00a65244624fd4f8c97e287d8f479fd647d041a4f4f00569d7bc4 not found: ID does not exist" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.978585 4712 scope.go:117] "RemoveContainer" containerID="81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7" Jan 31 05:45:42 crc kubenswrapper[4712]: E0131 05:45:42.979086 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7\": container with ID starting with 81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7 not found: ID does not exist" containerID="81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.979125 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7"} err="failed to get container status \"81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7\": rpc error: code = NotFound desc = could not find container \"81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7\": container with ID starting with 81eca7ba6aff091bab949d630f61289497038ddfab8e7beceafd793270a1dea7 not found: ID does not exist" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.979167 4712 scope.go:117] "RemoveContainer" containerID="19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff" Jan 31 05:45:42 crc kubenswrapper[4712]: I0131 05:45:42.992079 4712 scope.go:117] "RemoveContainer" containerID="d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.010548 4712 scope.go:117] "RemoveContainer" containerID="e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.026013 4712 scope.go:117] "RemoveContainer" containerID="19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff" Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.026581 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff\": container with ID starting with 19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff not found: ID does not exist" containerID="19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.026630 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff"} err="failed to get container status \"19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff\": rpc error: code = NotFound desc = could not find container \"19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff\": container with ID starting with 19eb80dc67d7069c8233f12fcee3c17b848e8a71e571c5fcc9b99c98122d4aff not found: ID does not exist" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.026674 4712 scope.go:117] "RemoveContainer" containerID="d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2" Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.027078 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2\": container with ID starting with d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2 not found: ID does not exist" containerID="d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.027123 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2"} err="failed to get container status 
\"d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2\": rpc error: code = NotFound desc = could not find container \"d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2\": container with ID starting with d6d50d445bff0093c6db89036b5c36ed7f5a5837a2e379d9c7df191dc0b046a2 not found: ID does not exist" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.027155 4712 scope.go:117] "RemoveContainer" containerID="e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8" Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.027461 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8\": container with ID starting with e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8 not found: ID does not exist" containerID="e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.027490 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8"} err="failed to get container status \"e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8\": rpc error: code = NotFound desc = could not find container \"e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8\": container with ID starting with e43af644530353e41430eb31ec357b245b2b309f56fb704c5ae12226cd3a1ff8 not found: ID does not exist" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.027518 4712 scope.go:117] "RemoveContainer" containerID="e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.039929 4712 scope.go:117] "RemoveContainer" containerID="bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.056019 4712 scope.go:117] "RemoveContainer" containerID="e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.068935 4712 scope.go:117] "RemoveContainer" containerID="e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb" Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.069489 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb\": container with ID starting with e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb not found: ID does not exist" containerID="e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.069545 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb"} err="failed to get container status \"e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb\": rpc error: code = NotFound desc = could not find container \"e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb\": container with ID starting with e2315995530927b41b6ecf0edfc7c6992fd500c9fb0f163922642fa9446e4bfb not found: ID does not exist" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.069594 4712 scope.go:117] "RemoveContainer" containerID="bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88" Jan 31 05:45:43 crc 
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.070125 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88\": container with ID starting with bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88 not found: ID does not exist" containerID="bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.070160 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88"} err="failed to get container status \"bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88\": rpc error: code = NotFound desc = could not find container \"bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88\": container with ID starting with bc6fa48fd96616197b5e0359af2f5e91f4327fd43b1085aead34c7f7f26d2d88 not found: ID does not exist"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.070219 4712 scope.go:117] "RemoveContainer" containerID="e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.070752 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082\": container with ID starting with e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082 not found: ID does not exist" containerID="e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.070781 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082"} err="failed to get container status \"e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082\": rpc error: code = NotFound desc = could not find container \"e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082\": container with ID starting with e10f459c0b3f8f8bbac425c5433b7fc91c2231aad3579e8f351a4d4017a55082 not found: ID does not exist"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.809319 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-jp69c"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.926675 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gwwfc"]
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.926907 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerName="extract-utilities"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.926919 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerName="extract-utilities"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.926929 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerName="extract-utilities"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.926937 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerName="extract-utilities"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.926945 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.926953 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.926965 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerName="extract-content"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.926971 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerName="extract-content"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.926982 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerName="extract-utilities"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.926989 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerName="extract-utilities"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.927000 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerName="extract-content"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927036 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerName="extract-content"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.927047 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerName="marketplace-operator"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927052 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerName="marketplace-operator"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.927059 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927097 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.927105 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" containerName="extract-content"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927111 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" containerName="extract-content"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.927123 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerName="marketplace-operator"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927129 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerName="marketplace-operator"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.927137 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" containerName="extract-utilities"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927144 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" containerName="extract-utilities"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.927217 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerName="extract-content"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927226 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerName="extract-content"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.927263 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927272 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: E0131 05:45:43.927281 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927287 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927425 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerName="marketplace-operator"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927437 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927446 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927457 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927465 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" containerName="registry-server"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.927968 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" containerName="marketplace-operator"
Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.928651 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gwwfc"
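The cpu_manager/state_mem/memory_manager records above show the kubelet dropping stale CPU and memory assignments for containers of the pods just deleted. The CPU manager checkpoints these assignments to a JSON state file on the node; below is a hedged Go sketch that dumps it. The path is the usual kubelet default and the struct covers only the fields one would expect; neither is guaranteed for every kubelet version.

// Dumps a summary of the kubelet CPU manager checkpoint file.
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// Partial view of the checkpoint; field names follow the kubelet's
// JSON output but are an assumption here, not a contract.
type cpuManagerState struct {
	PolicyName    string         `json:"policyName"`
	DefaultCPUSet string         `json:"defaultCpuSet"`
	Entries       map[string]any `json:"entries,omitempty"`
}

func main() {
	// Default checkpoint location under the kubelet root directory.
	data, err := os.ReadFile("/var/lib/kubelet/cpu_manager_state")
	if err != nil {
		fmt.Println("read:", err)
		return
	}
	var st cpuManagerState
	if err := json.Unmarshal(data, &st); err != nil {
		fmt.Println("decode:", err)
		return
	}
	fmt.Printf("policy=%s default=%q entries=%d\n",
		st.PolicyName, st.DefaultCPUSet, len(st.Entries))
}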
Need to start a new one" pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.930827 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.938848 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gwwfc"] Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.956413 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61c05007-0401-49ee-a678-0b94d67b2d5e-utilities\") pod \"certified-operators-gwwfc\" (UID: \"61c05007-0401-49ee-a678-0b94d67b2d5e\") " pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.956455 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84g6z\" (UniqueName: \"kubernetes.io/projected/61c05007-0401-49ee-a678-0b94d67b2d5e-kube-api-access-84g6z\") pod \"certified-operators-gwwfc\" (UID: \"61c05007-0401-49ee-a678-0b94d67b2d5e\") " pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:43 crc kubenswrapper[4712]: I0131 05:45:43.956500 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61c05007-0401-49ee-a678-0b94d67b2d5e-catalog-content\") pod \"certified-operators-gwwfc\" (UID: \"61c05007-0401-49ee-a678-0b94d67b2d5e\") " pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.057634 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61c05007-0401-49ee-a678-0b94d67b2d5e-utilities\") pod \"certified-operators-gwwfc\" (UID: \"61c05007-0401-49ee-a678-0b94d67b2d5e\") " pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.057689 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84g6z\" (UniqueName: \"kubernetes.io/projected/61c05007-0401-49ee-a678-0b94d67b2d5e-kube-api-access-84g6z\") pod \"certified-operators-gwwfc\" (UID: \"61c05007-0401-49ee-a678-0b94d67b2d5e\") " pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.057737 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61c05007-0401-49ee-a678-0b94d67b2d5e-catalog-content\") pod \"certified-operators-gwwfc\" (UID: \"61c05007-0401-49ee-a678-0b94d67b2d5e\") " pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.058055 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61c05007-0401-49ee-a678-0b94d67b2d5e-utilities\") pod \"certified-operators-gwwfc\" (UID: \"61c05007-0401-49ee-a678-0b94d67b2d5e\") " pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.058085 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61c05007-0401-49ee-a678-0b94d67b2d5e-catalog-content\") pod \"certified-operators-gwwfc\" (UID: 
\"61c05007-0401-49ee-a678-0b94d67b2d5e\") " pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.076470 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84g6z\" (UniqueName: \"kubernetes.io/projected/61c05007-0401-49ee-a678-0b94d67b2d5e-kube-api-access-84g6z\") pod \"certified-operators-gwwfc\" (UID: \"61c05007-0401-49ee-a678-0b94d67b2d5e\") " pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.130352 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qqkm2"] Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.131394 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.134055 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.145676 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqkm2"] Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.169118 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2b022ce-dad8-4c4c-a30b-4e2cbd06553a-utilities\") pod \"redhat-marketplace-qqkm2\" (UID: \"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a\") " pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.169611 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwjqx\" (UniqueName: \"kubernetes.io/projected/f2b022ce-dad8-4c4c-a30b-4e2cbd06553a-kube-api-access-vwjqx\") pod \"redhat-marketplace-qqkm2\" (UID: \"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a\") " pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.169706 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2b022ce-dad8-4c4c-a30b-4e2cbd06553a-catalog-content\") pod \"redhat-marketplace-qqkm2\" (UID: \"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a\") " pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.249183 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.271802 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2b022ce-dad8-4c4c-a30b-4e2cbd06553a-utilities\") pod \"redhat-marketplace-qqkm2\" (UID: \"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a\") " pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.271885 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwjqx\" (UniqueName: \"kubernetes.io/projected/f2b022ce-dad8-4c4c-a30b-4e2cbd06553a-kube-api-access-vwjqx\") pod \"redhat-marketplace-qqkm2\" (UID: \"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a\") " pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.271919 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2b022ce-dad8-4c4c-a30b-4e2cbd06553a-catalog-content\") pod \"redhat-marketplace-qqkm2\" (UID: \"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a\") " pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.274061 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f2b022ce-dad8-4c4c-a30b-4e2cbd06553a-catalog-content\") pod \"redhat-marketplace-qqkm2\" (UID: \"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a\") " pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.274848 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f2b022ce-dad8-4c4c-a30b-4e2cbd06553a-utilities\") pod \"redhat-marketplace-qqkm2\" (UID: \"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a\") " pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.294964 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwjqx\" (UniqueName: \"kubernetes.io/projected/f2b022ce-dad8-4c4c-a30b-4e2cbd06553a-kube-api-access-vwjqx\") pod \"redhat-marketplace-qqkm2\" (UID: \"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a\") " pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.453162 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.521248 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fd9f25f-4108-411d-a106-3d94f4afe2bf" path="/var/lib/kubelet/pods/1fd9f25f-4108-411d-a106-3d94f4afe2bf/volumes" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.524264 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86d1e607-e026-4540-a6b0-fab85244efd3" path="/var/lib/kubelet/pods/86d1e607-e026-4540-a6b0-fab85244efd3/volumes" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.526355 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4b6ff77-738a-480a-b29c-30a4a0d42182" path="/var/lib/kubelet/pods/b4b6ff77-738a-480a-b29c-30a4a0d42182/volumes" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.529910 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d110bc72-b196-442c-9ea7-f1460a0b2bf4" path="/var/lib/kubelet/pods/d110bc72-b196-442c-9ea7-f1460a0b2bf4/volumes" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.531400 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99" path="/var/lib/kubelet/pods/e78cf0aa-f8af-4405-b6c5-eb1c52ab1a99/volumes" Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.648520 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gwwfc"] Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.679822 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqkm2"] Jan 31 05:45:44 crc kubenswrapper[4712]: W0131 05:45:44.687750 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2b022ce_dad8_4c4c_a30b_4e2cbd06553a.slice/crio-3ba897a00655dbe50932c71cbae63d6666295edc30f7a70d776f4abcfdae2ae7 WatchSource:0}: Error finding container 3ba897a00655dbe50932c71cbae63d6666295edc30f7a70d776f4abcfdae2ae7: Status 404 returned error can't find the container with id 3ba897a00655dbe50932c71cbae63d6666295edc30f7a70d776f4abcfdae2ae7 Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.810128 4712 generic.go:334] "Generic (PLEG): container finished" podID="f2b022ce-dad8-4c4c-a30b-4e2cbd06553a" containerID="a17d2842721feca5e7282a7bc740971e472b101d53754fb489fdf746820288fb" exitCode=0 Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.810366 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqkm2" event={"ID":"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a","Type":"ContainerDied","Data":"a17d2842721feca5e7282a7bc740971e472b101d53754fb489fdf746820288fb"} Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.810435 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqkm2" event={"ID":"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a","Type":"ContainerStarted","Data":"3ba897a00655dbe50932c71cbae63d6666295edc30f7a70d776f4abcfdae2ae7"} Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.814686 4712 generic.go:334] "Generic (PLEG): container finished" podID="61c05007-0401-49ee-a678-0b94d67b2d5e" containerID="7a9752a0fa4cb14c327cc0348a4edbffcb9eae25bd4c961359a2df3774a18b6d" exitCode=0 Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.815004 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwwfc" 
event={"ID":"61c05007-0401-49ee-a678-0b94d67b2d5e","Type":"ContainerDied","Data":"7a9752a0fa4cb14c327cc0348a4edbffcb9eae25bd4c961359a2df3774a18b6d"} Jan 31 05:45:44 crc kubenswrapper[4712]: I0131 05:45:44.815046 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwwfc" event={"ID":"61c05007-0401-49ee-a678-0b94d67b2d5e","Type":"ContainerStarted","Data":"8c87f762a6b19fcd954f6ccbe1f2b4cda5c35cb0555408b7f76a8d7d6e02ebad"} Jan 31 05:45:45 crc kubenswrapper[4712]: I0131 05:45:45.825425 4712 generic.go:334] "Generic (PLEG): container finished" podID="61c05007-0401-49ee-a678-0b94d67b2d5e" containerID="f3df6c0c5087effb0a8d2964b59bbe862626a894e82eb78056f1427b76c0dc71" exitCode=0 Jan 31 05:45:45 crc kubenswrapper[4712]: I0131 05:45:45.825522 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwwfc" event={"ID":"61c05007-0401-49ee-a678-0b94d67b2d5e","Type":"ContainerDied","Data":"f3df6c0c5087effb0a8d2964b59bbe862626a894e82eb78056f1427b76c0dc71"} Jan 31 05:45:45 crc kubenswrapper[4712]: I0131 05:45:45.829755 4712 generic.go:334] "Generic (PLEG): container finished" podID="f2b022ce-dad8-4c4c-a30b-4e2cbd06553a" containerID="08fda0e8883342bbf507acca26a08507345a5f968e23ed009117aec8975ababd" exitCode=0 Jan 31 05:45:45 crc kubenswrapper[4712]: I0131 05:45:45.829787 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqkm2" event={"ID":"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a","Type":"ContainerDied","Data":"08fda0e8883342bbf507acca26a08507345a5f968e23ed009117aec8975ababd"} Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.343423 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lvdxh"] Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.344881 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.347671 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.370328 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lvdxh"] Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.398163 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-utilities\") pod \"community-operators-lvdxh\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.398274 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-catalog-content\") pod \"community-operators-lvdxh\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.398298 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz6mh\" (UniqueName: \"kubernetes.io/projected/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-kube-api-access-pz6mh\") pod \"community-operators-lvdxh\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.499233 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-catalog-content\") pod \"community-operators-lvdxh\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.499287 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz6mh\" (UniqueName: \"kubernetes.io/projected/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-kube-api-access-pz6mh\") pod \"community-operators-lvdxh\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.499537 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-utilities\") pod \"community-operators-lvdxh\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.499776 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-catalog-content\") pod \"community-operators-lvdxh\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.500023 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-utilities\") pod \"community-operators-lvdxh\" (UID: 
\"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.529821 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vtp6q"] Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.531875 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.533835 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.535412 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz6mh\" (UniqueName: \"kubernetes.io/projected/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-kube-api-access-pz6mh\") pod \"community-operators-lvdxh\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.541895 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vtp6q"] Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.702015 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.703961 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a6d2146-9c2b-4913-905f-ed52fb1ac076-catalog-content\") pod \"redhat-operators-vtp6q\" (UID: \"8a6d2146-9c2b-4913-905f-ed52fb1ac076\") " pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.704121 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nn5s\" (UniqueName: \"kubernetes.io/projected/8a6d2146-9c2b-4913-905f-ed52fb1ac076-kube-api-access-4nn5s\") pod \"redhat-operators-vtp6q\" (UID: \"8a6d2146-9c2b-4913-905f-ed52fb1ac076\") " pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.704186 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a6d2146-9c2b-4913-905f-ed52fb1ac076-utilities\") pod \"redhat-operators-vtp6q\" (UID: \"8a6d2146-9c2b-4913-905f-ed52fb1ac076\") " pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.805309 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nn5s\" (UniqueName: \"kubernetes.io/projected/8a6d2146-9c2b-4913-905f-ed52fb1ac076-kube-api-access-4nn5s\") pod \"redhat-operators-vtp6q\" (UID: \"8a6d2146-9c2b-4913-905f-ed52fb1ac076\") " pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.805555 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a6d2146-9c2b-4913-905f-ed52fb1ac076-utilities\") pod \"redhat-operators-vtp6q\" (UID: \"8a6d2146-9c2b-4913-905f-ed52fb1ac076\") " pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.805581 4712 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a6d2146-9c2b-4913-905f-ed52fb1ac076-catalog-content\") pod \"redhat-operators-vtp6q\" (UID: \"8a6d2146-9c2b-4913-905f-ed52fb1ac076\") " pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.806158 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a6d2146-9c2b-4913-905f-ed52fb1ac076-utilities\") pod \"redhat-operators-vtp6q\" (UID: \"8a6d2146-9c2b-4913-905f-ed52fb1ac076\") " pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.806724 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a6d2146-9c2b-4913-905f-ed52fb1ac076-catalog-content\") pod \"redhat-operators-vtp6q\" (UID: \"8a6d2146-9c2b-4913-905f-ed52fb1ac076\") " pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.829358 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nn5s\" (UniqueName: \"kubernetes.io/projected/8a6d2146-9c2b-4913-905f-ed52fb1ac076-kube-api-access-4nn5s\") pod \"redhat-operators-vtp6q\" (UID: \"8a6d2146-9c2b-4913-905f-ed52fb1ac076\") " pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.845267 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwwfc" event={"ID":"61c05007-0401-49ee-a678-0b94d67b2d5e","Type":"ContainerStarted","Data":"6eeec07c502151cd90ec85a300c6b79498df7403e4182e8bd33c48e3301d0ded"} Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.849109 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqkm2" event={"ID":"f2b022ce-dad8-4c4c-a30b-4e2cbd06553a","Type":"ContainerStarted","Data":"1d97e05fbbba13ad13a2468a15a64895f7b537b8167ca48d68d4e2cb10ed3fae"} Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.859093 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.869916 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gwwfc" podStartSLOduration=2.453668749 podStartE2EDuration="3.869898996s" podCreationTimestamp="2026-01-31 05:45:43 +0000 UTC" firstStartedPulling="2026-01-31 05:45:44.816387516 +0000 UTC m=+410.910269357" lastFinishedPulling="2026-01-31 05:45:46.232617763 +0000 UTC m=+412.326499604" observedRunningTime="2026-01-31 05:45:46.868432418 +0000 UTC m=+412.962314279" watchObservedRunningTime="2026-01-31 05:45:46.869898996 +0000 UTC m=+412.963780837" Jan 31 05:45:46 crc kubenswrapper[4712]: I0131 05:45:46.898194 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qqkm2" podStartSLOduration=1.474135875 podStartE2EDuration="2.898142101s" podCreationTimestamp="2026-01-31 05:45:44 +0000 UTC" firstStartedPulling="2026-01-31 05:45:44.812929077 +0000 UTC m=+410.906810918" lastFinishedPulling="2026-01-31 05:45:46.236935303 +0000 UTC m=+412.330817144" observedRunningTime="2026-01-31 05:45:46.890810712 +0000 UTC m=+412.984692553" watchObservedRunningTime="2026-01-31 05:45:46.898142101 +0000 UTC m=+412.992023942" Jan 31 05:45:47 crc kubenswrapper[4712]: I0131 05:45:47.114983 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lvdxh"] Jan 31 05:45:47 crc kubenswrapper[4712]: I0131 05:45:47.309747 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vtp6q"] Jan 31 05:45:47 crc kubenswrapper[4712]: I0131 05:45:47.854931 4712 generic.go:334] "Generic (PLEG): container finished" podID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerID="f193d8ab8e082aa426959dfc4c4d38fbe953bfab7fb081c5648b61f08c47484b" exitCode=0 Jan 31 05:45:47 crc kubenswrapper[4712]: I0131 05:45:47.854980 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lvdxh" event={"ID":"203c6cb9-4fba-4d77-9bf4-9825f1419e4e","Type":"ContainerDied","Data":"f193d8ab8e082aa426959dfc4c4d38fbe953bfab7fb081c5648b61f08c47484b"} Jan 31 05:45:47 crc kubenswrapper[4712]: I0131 05:45:47.855033 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lvdxh" event={"ID":"203c6cb9-4fba-4d77-9bf4-9825f1419e4e","Type":"ContainerStarted","Data":"caed9e72eb17f5822e923dc34c2d69a8219a714dd22aeaa4e216253fafd46e11"} Jan 31 05:45:47 crc kubenswrapper[4712]: I0131 05:45:47.857889 4712 generic.go:334] "Generic (PLEG): container finished" podID="8a6d2146-9c2b-4913-905f-ed52fb1ac076" containerID="57ee15371e73ae713f8b6900b13f5b110e9bf06b9346e0c05631901db016cb70" exitCode=0 Jan 31 05:45:47 crc kubenswrapper[4712]: I0131 05:45:47.858185 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtp6q" event={"ID":"8a6d2146-9c2b-4913-905f-ed52fb1ac076","Type":"ContainerDied","Data":"57ee15371e73ae713f8b6900b13f5b110e9bf06b9346e0c05631901db016cb70"} Jan 31 05:45:47 crc kubenswrapper[4712]: I0131 05:45:47.858221 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtp6q" event={"ID":"8a6d2146-9c2b-4913-905f-ed52fb1ac076","Type":"ContainerStarted","Data":"4bcbda0b7ca44240555c58fa2e40e03e37d43bb420128842dcf47d707e13d4a8"} Jan 31 05:45:48 crc kubenswrapper[4712]: I0131 05:45:48.865238 4712 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtp6q" event={"ID":"8a6d2146-9c2b-4913-905f-ed52fb1ac076","Type":"ContainerStarted","Data":"23de2547fb8b3236a93d48a3e0dd828124bce6424a3c73ccbd2845a634f640b1"} Jan 31 05:45:48 crc kubenswrapper[4712]: I0131 05:45:48.867899 4712 generic.go:334] "Generic (PLEG): container finished" podID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerID="123298f45a34b31c129acb6212fb447683eda567437cff0a00515d934a5e56d0" exitCode=0 Jan 31 05:45:48 crc kubenswrapper[4712]: I0131 05:45:48.867928 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lvdxh" event={"ID":"203c6cb9-4fba-4d77-9bf4-9825f1419e4e","Type":"ContainerDied","Data":"123298f45a34b31c129acb6212fb447683eda567437cff0a00515d934a5e56d0"} Jan 31 05:45:49 crc kubenswrapper[4712]: I0131 05:45:49.876614 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lvdxh" event={"ID":"203c6cb9-4fba-4d77-9bf4-9825f1419e4e","Type":"ContainerStarted","Data":"8ac82e7ec95b3e1a5e8b022a48adbe235a4aae53e719a1ec944bad1e4fb0a5d1"} Jan 31 05:45:49 crc kubenswrapper[4712]: I0131 05:45:49.878259 4712 generic.go:334] "Generic (PLEG): container finished" podID="8a6d2146-9c2b-4913-905f-ed52fb1ac076" containerID="23de2547fb8b3236a93d48a3e0dd828124bce6424a3c73ccbd2845a634f640b1" exitCode=0 Jan 31 05:45:49 crc kubenswrapper[4712]: I0131 05:45:49.878311 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtp6q" event={"ID":"8a6d2146-9c2b-4913-905f-ed52fb1ac076","Type":"ContainerDied","Data":"23de2547fb8b3236a93d48a3e0dd828124bce6424a3c73ccbd2845a634f640b1"} Jan 31 05:45:49 crc kubenswrapper[4712]: I0131 05:45:49.898305 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lvdxh" podStartSLOduration=2.476225706 podStartE2EDuration="3.898290233s" podCreationTimestamp="2026-01-31 05:45:46 +0000 UTC" firstStartedPulling="2026-01-31 05:45:47.85638772 +0000 UTC m=+413.950269561" lastFinishedPulling="2026-01-31 05:45:49.278452247 +0000 UTC m=+415.372334088" observedRunningTime="2026-01-31 05:45:49.895642016 +0000 UTC m=+415.989523877" watchObservedRunningTime="2026-01-31 05:45:49.898290233 +0000 UTC m=+415.992172074" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.207271 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" podUID="fecbebbb-c701-40e7-9755-5ff54d25523d" containerName="registry" containerID="cri-o://66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70" gracePeriod=30 Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.619512 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.690554 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fecbebbb-c701-40e7-9755-5ff54d25523d-installation-pull-secrets\") pod \"fecbebbb-c701-40e7-9755-5ff54d25523d\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.690663 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-trusted-ca\") pod \"fecbebbb-c701-40e7-9755-5ff54d25523d\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.690687 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-tls\") pod \"fecbebbb-c701-40e7-9755-5ff54d25523d\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.690722 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-certificates\") pod \"fecbebbb-c701-40e7-9755-5ff54d25523d\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.691443 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "fecbebbb-c701-40e7-9755-5ff54d25523d" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.691501 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcq96\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-kube-api-access-zcq96\") pod \"fecbebbb-c701-40e7-9755-5ff54d25523d\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.691531 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-bound-sa-token\") pod \"fecbebbb-c701-40e7-9755-5ff54d25523d\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.691568 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fecbebbb-c701-40e7-9755-5ff54d25523d-ca-trust-extracted\") pod \"fecbebbb-c701-40e7-9755-5ff54d25523d\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.691557 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "fecbebbb-c701-40e7-9755-5ff54d25523d" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.691665 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"fecbebbb-c701-40e7-9755-5ff54d25523d\" (UID: \"fecbebbb-c701-40e7-9755-5ff54d25523d\") " Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.691956 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.691975 4712 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.700744 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-kube-api-access-zcq96" (OuterVolumeSpecName: "kube-api-access-zcq96") pod "fecbebbb-c701-40e7-9755-5ff54d25523d" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d"). InnerVolumeSpecName "kube-api-access-zcq96". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.708330 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fecbebbb-c701-40e7-9755-5ff54d25523d-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "fecbebbb-c701-40e7-9755-5ff54d25523d" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.709568 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "fecbebbb-c701-40e7-9755-5ff54d25523d" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.709757 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fecbebbb-c701-40e7-9755-5ff54d25523d-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "fecbebbb-c701-40e7-9755-5ff54d25523d" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.710030 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "fecbebbb-c701-40e7-9755-5ff54d25523d" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.720016 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "fecbebbb-c701-40e7-9755-5ff54d25523d" (UID: "fecbebbb-c701-40e7-9755-5ff54d25523d"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.792670 4712 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.792705 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcq96\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-kube-api-access-zcq96\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.792716 4712 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fecbebbb-c701-40e7-9755-5ff54d25523d-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.792725 4712 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fecbebbb-c701-40e7-9755-5ff54d25523d-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.792734 4712 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fecbebbb-c701-40e7-9755-5ff54d25523d-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.884791 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtp6q" event={"ID":"8a6d2146-9c2b-4913-905f-ed52fb1ac076","Type":"ContainerStarted","Data":"295122a805dcb8f8d0c4d902eb349ef88a29ef28fcc22676b72c1126d764cd47"} Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.887562 4712 generic.go:334] "Generic (PLEG): container finished" podID="fecbebbb-c701-40e7-9755-5ff54d25523d" containerID="66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70" exitCode=0 Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.888161 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.889286 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" event={"ID":"fecbebbb-c701-40e7-9755-5ff54d25523d","Type":"ContainerDied","Data":"66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70"} Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.889344 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nlhkt" event={"ID":"fecbebbb-c701-40e7-9755-5ff54d25523d","Type":"ContainerDied","Data":"e034e259ee43464adfbf05cfdce3a880d7ddd0f5965b083bdd3917482da21652"} Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.889368 4712 scope.go:117] "RemoveContainer" containerID="66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.907716 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vtp6q" podStartSLOduration=2.487068723 podStartE2EDuration="4.907698693s" podCreationTimestamp="2026-01-31 05:45:46 +0000 UTC" firstStartedPulling="2026-01-31 05:45:47.859619212 +0000 UTC m=+413.953501053" lastFinishedPulling="2026-01-31 05:45:50.280249182 +0000 UTC m=+416.374131023" observedRunningTime="2026-01-31 05:45:50.90365095 +0000 UTC m=+416.997532791" watchObservedRunningTime="2026-01-31 05:45:50.907698693 +0000 UTC m=+417.001580544" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.908063 4712 scope.go:117] "RemoveContainer" containerID="66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70" Jan 31 05:45:50 crc kubenswrapper[4712]: E0131 05:45:50.908623 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70\": container with ID starting with 66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70 not found: ID does not exist" containerID="66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.908677 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70"} err="failed to get container status \"66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70\": rpc error: code = NotFound desc = could not find container \"66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70\": container with ID starting with 66ab3e83777952ac67c105bbc9dee14ac4b429711f8c41fa887ed0eb7865fb70 not found: ID does not exist" Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.921377 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nlhkt"] Jan 31 05:45:50 crc kubenswrapper[4712]: I0131 05:45:50.923759 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nlhkt"] Jan 31 05:45:52 crc kubenswrapper[4712]: I0131 05:45:52.510855 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fecbebbb-c701-40e7-9755-5ff54d25523d" path="/var/lib/kubelet/pods/fecbebbb-c701-40e7-9755-5ff54d25523d/volumes" Jan 31 05:45:54 crc kubenswrapper[4712]: I0131 05:45:54.249892 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:54 crc kubenswrapper[4712]: I0131 05:45:54.250288 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:54 crc kubenswrapper[4712]: I0131 05:45:54.296620 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:54 crc kubenswrapper[4712]: I0131 05:45:54.458865 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:54 crc kubenswrapper[4712]: I0131 05:45:54.459470 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:54 crc kubenswrapper[4712]: I0131 05:45:54.517096 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:54 crc kubenswrapper[4712]: I0131 05:45:54.947924 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gwwfc" Jan 31 05:45:54 crc kubenswrapper[4712]: I0131 05:45:54.950495 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qqkm2" Jan 31 05:45:56 crc kubenswrapper[4712]: I0131 05:45:56.703299 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:56 crc kubenswrapper[4712]: I0131 05:45:56.703631 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:56 crc kubenswrapper[4712]: I0131 05:45:56.748064 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:56 crc kubenswrapper[4712]: I0131 05:45:56.860977 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:56 crc kubenswrapper[4712]: I0131 05:45:56.861032 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:56 crc kubenswrapper[4712]: I0131 05:45:56.920181 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:45:56 crc kubenswrapper[4712]: I0131 05:45:56.958574 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lvdxh" Jan 31 05:45:56 crc kubenswrapper[4712]: I0131 05:45:56.958675 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vtp6q" Jan 31 05:46:12 crc kubenswrapper[4712]: I0131 05:46:12.497997 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:46:12 crc kubenswrapper[4712]: I0131 05:46:12.498713 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:46:12 crc kubenswrapper[4712]: I0131 05:46:12.498775 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:46:12 crc kubenswrapper[4712]: I0131 05:46:12.499454 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"665f0601f5bd5cd4080af883b309ab10a9cf5bd861b816ffdf4888ace3070ed5"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 05:46:12 crc kubenswrapper[4712]: I0131 05:46:12.499522 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://665f0601f5bd5cd4080af883b309ab10a9cf5bd861b816ffdf4888ace3070ed5" gracePeriod=600 Jan 31 05:46:13 crc kubenswrapper[4712]: I0131 05:46:13.031147 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="665f0601f5bd5cd4080af883b309ab10a9cf5bd861b816ffdf4888ace3070ed5" exitCode=0 Jan 31 05:46:13 crc kubenswrapper[4712]: I0131 05:46:13.031754 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"665f0601f5bd5cd4080af883b309ab10a9cf5bd861b816ffdf4888ace3070ed5"} Jan 31 05:46:13 crc kubenswrapper[4712]: I0131 05:46:13.032002 4712 scope.go:117] "RemoveContainer" containerID="fce20be6706dbed5476c38e4c266760acabeb26b1b15afbb0efe687dffc27c44" Jan 31 05:46:14 crc kubenswrapper[4712]: I0131 05:46:14.041146 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"b3a7c99a912faf460ca649d69822abd612ccb0330382bcc11abe1bfe6578ac2d"} Jan 31 05:48:42 crc kubenswrapper[4712]: I0131 05:48:42.498535 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:48:42 crc kubenswrapper[4712]: I0131 05:48:42.499629 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:49:12 crc kubenswrapper[4712]: I0131 05:49:12.497477 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:49:12 crc kubenswrapper[4712]: I0131 05:49:12.498346 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" 
podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:49:42 crc kubenswrapper[4712]: I0131 05:49:42.497714 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:49:42 crc kubenswrapper[4712]: I0131 05:49:42.498775 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:49:42 crc kubenswrapper[4712]: I0131 05:49:42.498938 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:49:42 crc kubenswrapper[4712]: I0131 05:49:42.500158 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b3a7c99a912faf460ca649d69822abd612ccb0330382bcc11abe1bfe6578ac2d"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 05:49:42 crc kubenswrapper[4712]: I0131 05:49:42.500303 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://b3a7c99a912faf460ca649d69822abd612ccb0330382bcc11abe1bfe6578ac2d" gracePeriod=600 Jan 31 05:49:43 crc kubenswrapper[4712]: I0131 05:49:43.433638 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="b3a7c99a912faf460ca649d69822abd612ccb0330382bcc11abe1bfe6578ac2d" exitCode=0 Jan 31 05:49:43 crc kubenswrapper[4712]: I0131 05:49:43.433738 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"b3a7c99a912faf460ca649d69822abd612ccb0330382bcc11abe1bfe6578ac2d"} Jan 31 05:49:43 crc kubenswrapper[4712]: I0131 05:49:43.434637 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"6c31bbfd8c8125c0094ca4ca6d9f21a2e8a425cef3c9f9b1a3d36d5e6b54a6c8"} Jan 31 05:49:43 crc kubenswrapper[4712]: I0131 05:49:43.434691 4712 scope.go:117] "RemoveContainer" containerID="665f0601f5bd5cd4080af883b309ab10a9cf5bd861b816ffdf4888ace3070ed5" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.196912 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-m4x85"] Jan 31 05:50:45 crc kubenswrapper[4712]: E0131 05:50:45.197875 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fecbebbb-c701-40e7-9755-5ff54d25523d" containerName="registry" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.197948 4712 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="fecbebbb-c701-40e7-9755-5ff54d25523d" containerName="registry" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.198073 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="fecbebbb-c701-40e7-9755-5ff54d25523d" containerName="registry" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.198579 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-m4x85" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.200816 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.201097 4712 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-mbqnp" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.201472 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.211040 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-m4x85"] Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.216700 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-rzlv8"] Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.218355 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-rzlv8" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.221658 4712 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-dgzgf" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.245988 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-rzlv8"] Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.256568 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-cq69c"] Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.257470 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.259870 4712 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-dnlcl" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.263413 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-cq69c"] Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.368107 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2djt\" (UniqueName: \"kubernetes.io/projected/8a4a4023-5949-4fb1-b75d-375705a6ccd5-kube-api-access-f2djt\") pod \"cert-manager-cainjector-cf98fcc89-m4x85\" (UID: \"8a4a4023-5949-4fb1-b75d-375705a6ccd5\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-m4x85" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.368314 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsvwb\" (UniqueName: \"kubernetes.io/projected/d6da4569-9d12-457b-b448-2a96889fd6d0-kube-api-access-hsvwb\") pod \"cert-manager-858654f9db-rzlv8\" (UID: \"d6da4569-9d12-457b-b448-2a96889fd6d0\") " pod="cert-manager/cert-manager-858654f9db-rzlv8" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.369055 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h85hr\" (UniqueName: \"kubernetes.io/projected/7a3564dd-edd4-4e23-b910-084279771f4a-kube-api-access-h85hr\") pod \"cert-manager-webhook-687f57d79b-cq69c\" (UID: \"7a3564dd-edd4-4e23-b910-084279771f4a\") " pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.470072 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h85hr\" (UniqueName: \"kubernetes.io/projected/7a3564dd-edd4-4e23-b910-084279771f4a-kube-api-access-h85hr\") pod \"cert-manager-webhook-687f57d79b-cq69c\" (UID: \"7a3564dd-edd4-4e23-b910-084279771f4a\") " pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.470205 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2djt\" (UniqueName: \"kubernetes.io/projected/8a4a4023-5949-4fb1-b75d-375705a6ccd5-kube-api-access-f2djt\") pod \"cert-manager-cainjector-cf98fcc89-m4x85\" (UID: \"8a4a4023-5949-4fb1-b75d-375705a6ccd5\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-m4x85" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.470245 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsvwb\" (UniqueName: \"kubernetes.io/projected/d6da4569-9d12-457b-b448-2a96889fd6d0-kube-api-access-hsvwb\") pod \"cert-manager-858654f9db-rzlv8\" (UID: \"d6da4569-9d12-457b-b448-2a96889fd6d0\") " pod="cert-manager/cert-manager-858654f9db-rzlv8" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.497357 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h85hr\" (UniqueName: \"kubernetes.io/projected/7a3564dd-edd4-4e23-b910-084279771f4a-kube-api-access-h85hr\") pod \"cert-manager-webhook-687f57d79b-cq69c\" (UID: \"7a3564dd-edd4-4e23-b910-084279771f4a\") " pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.498228 4712 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-hsvwb\" (UniqueName: \"kubernetes.io/projected/d6da4569-9d12-457b-b448-2a96889fd6d0-kube-api-access-hsvwb\") pod \"cert-manager-858654f9db-rzlv8\" (UID: \"d6da4569-9d12-457b-b448-2a96889fd6d0\") " pod="cert-manager/cert-manager-858654f9db-rzlv8" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.498259 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2djt\" (UniqueName: \"kubernetes.io/projected/8a4a4023-5949-4fb1-b75d-375705a6ccd5-kube-api-access-f2djt\") pod \"cert-manager-cainjector-cf98fcc89-m4x85\" (UID: \"8a4a4023-5949-4fb1-b75d-375705a6ccd5\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-m4x85" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.521871 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-m4x85" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.542006 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-rzlv8" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.578668 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.886203 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-cq69c"] Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.896379 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.901977 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" event={"ID":"7a3564dd-edd4-4e23-b910-084279771f4a","Type":"ContainerStarted","Data":"9d911bdd58153895ee1070f7d30f9fb38aadd601cb5cf41d270dd4c92a791ddb"} Jan 31 05:50:45 crc kubenswrapper[4712]: I0131 05:50:45.994380 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-m4x85"] Jan 31 05:50:46 crc kubenswrapper[4712]: I0131 05:50:46.010708 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-rzlv8"] Jan 31 05:50:46 crc kubenswrapper[4712]: W0131 05:50:46.014113 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6da4569_9d12_457b_b448_2a96889fd6d0.slice/crio-b7e779585c3eceb69ed1161f498f0c32587f74627992c3a362bc4e82d5ee3108 WatchSource:0}: Error finding container b7e779585c3eceb69ed1161f498f0c32587f74627992c3a362bc4e82d5ee3108: Status 404 returned error can't find the container with id b7e779585c3eceb69ed1161f498f0c32587f74627992c3a362bc4e82d5ee3108 Jan 31 05:50:46 crc kubenswrapper[4712]: I0131 05:50:46.915532 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-m4x85" event={"ID":"8a4a4023-5949-4fb1-b75d-375705a6ccd5","Type":"ContainerStarted","Data":"af3e985b27852b6ecf3cd0389df16c72dacca04116184334796fcb8ef8945806"} Jan 31 05:50:46 crc kubenswrapper[4712]: I0131 05:50:46.916595 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-rzlv8" event={"ID":"d6da4569-9d12-457b-b448-2a96889fd6d0","Type":"ContainerStarted","Data":"b7e779585c3eceb69ed1161f498f0c32587f74627992c3a362bc4e82d5ee3108"} Jan 31 05:50:49 crc kubenswrapper[4712]: I0131 05:50:49.948769 
4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" event={"ID":"7a3564dd-edd4-4e23-b910-084279771f4a","Type":"ContainerStarted","Data":"04ec1d5ee3e41a71439dc1635b0d2993997674f5c14dca22bc8d5a61a62cd747"} Jan 31 05:50:49 crc kubenswrapper[4712]: I0131 05:50:49.949981 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" Jan 31 05:50:49 crc kubenswrapper[4712]: I0131 05:50:49.953807 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-rzlv8" event={"ID":"d6da4569-9d12-457b-b448-2a96889fd6d0","Type":"ContainerStarted","Data":"4dd56f1c88a0ced7e85f5bed42fb9b19aab164bbf352011ab12c21d9ec324e4d"} Jan 31 05:50:49 crc kubenswrapper[4712]: I0131 05:50:49.955751 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-m4x85" event={"ID":"8a4a4023-5949-4fb1-b75d-375705a6ccd5","Type":"ContainerStarted","Data":"2b3c325b18a67de00ae48baf3f4543812f718200655b67a4e75445edd9d7779f"} Jan 31 05:50:49 crc kubenswrapper[4712]: I0131 05:50:49.972765 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" podStartSLOduration=1.191435655 podStartE2EDuration="4.972750804s" podCreationTimestamp="2026-01-31 05:50:45 +0000 UTC" firstStartedPulling="2026-01-31 05:50:45.896071406 +0000 UTC m=+711.989953247" lastFinishedPulling="2026-01-31 05:50:49.677386555 +0000 UTC m=+715.771268396" observedRunningTime="2026-01-31 05:50:49.969577247 +0000 UTC m=+716.063459088" watchObservedRunningTime="2026-01-31 05:50:49.972750804 +0000 UTC m=+716.066632645" Jan 31 05:50:49 crc kubenswrapper[4712]: I0131 05:50:49.990188 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-rzlv8" podStartSLOduration=1.344920373 podStartE2EDuration="4.990157221s" podCreationTimestamp="2026-01-31 05:50:45 +0000 UTC" firstStartedPulling="2026-01-31 05:50:46.016464799 +0000 UTC m=+712.110346640" lastFinishedPulling="2026-01-31 05:50:49.661701617 +0000 UTC m=+715.755583488" observedRunningTime="2026-01-31 05:50:49.988293547 +0000 UTC m=+716.082175388" watchObservedRunningTime="2026-01-31 05:50:49.990157221 +0000 UTC m=+716.084039062" Jan 31 05:50:54 crc kubenswrapper[4712]: I0131 05:50:54.906852 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-m4x85" podStartSLOduration=6.235719265 podStartE2EDuration="9.90683089s" podCreationTimestamp="2026-01-31 05:50:45 +0000 UTC" firstStartedPulling="2026-01-31 05:50:46.003435111 +0000 UTC m=+712.097316982" lastFinishedPulling="2026-01-31 05:50:49.674546766 +0000 UTC m=+715.768428607" observedRunningTime="2026-01-31 05:50:50.021819292 +0000 UTC m=+716.115701143" watchObservedRunningTime="2026-01-31 05:50:54.90683089 +0000 UTC m=+721.000712731" Jan 31 05:50:54 crc kubenswrapper[4712]: I0131 05:50:54.913700 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6r6bn"] Jan 31 05:50:54 crc kubenswrapper[4712]: I0131 05:50:54.914363 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="nbdb" containerID="cri-o://262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f" gracePeriod=30 Jan 31 05:50:54 crc 
kubenswrapper[4712]: I0131 05:50:54.914442 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovn-acl-logging" containerID="cri-o://4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60" gracePeriod=30 Jan 31 05:50:54 crc kubenswrapper[4712]: I0131 05:50:54.914461 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kube-rbac-proxy-node" containerID="cri-o://2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328" gracePeriod=30 Jan 31 05:50:54 crc kubenswrapper[4712]: I0131 05:50:54.914549 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="sbdb" containerID="cri-o://3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931" gracePeriod=30 Jan 31 05:50:54 crc kubenswrapper[4712]: I0131 05:50:54.914376 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165" gracePeriod=30 Jan 31 05:50:54 crc kubenswrapper[4712]: I0131 05:50:54.914463 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="northd" containerID="cri-o://d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0" gracePeriod=30 Jan 31 05:50:54 crc kubenswrapper[4712]: I0131 05:50:54.914370 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovn-controller" containerID="cri-o://9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a" gracePeriod=30 Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.026679 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" containerID="cri-o://7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de" gracePeriod=30 Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.372221 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/3.log" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.374477 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovn-acl-logging/0.log" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.375007 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovn-controller/0.log" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.375477 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440398 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-cxw89"] Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440659 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovn-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440679 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovn-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440700 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="northd" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440708 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="northd" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440721 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kubecfg-setup" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440731 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kubecfg-setup" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440741 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kube-rbac-proxy-node" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440751 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kube-rbac-proxy-node" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440763 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="sbdb" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440771 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="sbdb" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440781 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440789 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440799 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440807 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440819 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kube-rbac-proxy-ovn-metrics" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440827 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kube-rbac-proxy-ovn-metrics" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440838 4712 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440846 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440856 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440864 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440875 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovn-acl-logging" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440883 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovn-acl-logging" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.440895 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="nbdb" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.440903 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="nbdb" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441026 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovn-acl-logging" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441038 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441049 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="nbdb" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441067 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="sbdb" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441079 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kube-rbac-proxy-node" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441091 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441102 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="kube-rbac-proxy-ovn-metrics" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441111 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="northd" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441122 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441131 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441142 4712 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovn-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: E0131 05:50:55.441306 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441316 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.441439 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerName="ovnkube-controller" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.443811 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467609 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-systemd-units\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467680 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-config\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467688 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467724 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-kubelet\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467763 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-log-socket\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467789 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-netd\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467823 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-bin\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467866 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-script-lib\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467905 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-netns\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467928 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-systemd\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467966 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-slash\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.467997 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-ovn\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468026 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-var-lib-openvswitch\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: 
\"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468025 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468058 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovn-node-metrics-cert\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468075 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468089 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-node-log\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468107 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468147 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468226 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-env-overrides\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468235 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468256 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-ovn-kubernetes\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468234 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-log-socket" (OuterVolumeSpecName: "log-socket") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468307 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468348 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-etc-openvswitch\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468366 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-slash" (OuterVolumeSpecName: "host-slash") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468393 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-openvswitch\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468430 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468438 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xdhx\" (UniqueName: \"kubernetes.io/projected/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-kube-api-access-8xdhx\") pod \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\" (UID: \"2f522e2e-c0c8-44a7-b834-ac367dba0c9c\") " Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468654 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468686 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468709 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468732 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-node-log" (OuterVolumeSpecName: "node-log") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468752 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468822 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-cni-bin\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469005 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-node-log\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469071 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-var-lib-openvswitch\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469101 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-kubelet\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.468903 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469080 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469224 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-etc-openvswitch\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469254 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-cni-netd\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469275 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/05292ea6-aa2b-48b8-adaa-91002b1d615c-ovnkube-config\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469279 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469295 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-run-openvswitch\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469367 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/05292ea6-aa2b-48b8-adaa-91002b1d615c-ovnkube-script-lib\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469406 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-run-ovn-kubernetes\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469462 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-run-ovn\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469517 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-log-socket\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469580 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-run-netns\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469617 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/05292ea6-aa2b-48b8-adaa-91002b1d615c-env-overrides\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469641 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-systemd-units\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469672 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-run-systemd\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469702 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469736 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85m75\" (UniqueName: \"kubernetes.io/projected/05292ea6-aa2b-48b8-adaa-91002b1d615c-kube-api-access-85m75\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469768 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-slash\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469800 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/05292ea6-aa2b-48b8-adaa-91002b1d615c-ovn-node-metrics-cert\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469873 4712 
reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469888 4712 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469903 4712 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469916 4712 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469928 4712 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-log-socket\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469939 4712 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469952 4712 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469964 4712 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469976 4712 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.469989 4712 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-slash\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.470000 4712 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.470014 4712 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.470030 4712 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-node-log\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.470049 4712 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.470066 4712 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.470083 4712 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.470096 4712 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.476419 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-kube-api-access-8xdhx" (OuterVolumeSpecName: "kube-api-access-8xdhx") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "kube-api-access-8xdhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.478263 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.488233 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "2f522e2e-c0c8-44a7-b834-ac367dba0c9c" (UID: "2f522e2e-c0c8-44a7-b834-ac367dba0c9c"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571591 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/05292ea6-aa2b-48b8-adaa-91002b1d615c-ovnkube-script-lib\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571691 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-run-ovn-kubernetes\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571738 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-run-ovn\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571772 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-log-socket\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571820 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-run-netns\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571850 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/05292ea6-aa2b-48b8-adaa-91002b1d615c-env-overrides\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571848 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-run-ovn-kubernetes\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571875 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-systemd-units\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571929 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-run-systemd\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc 
kubenswrapper[4712]: I0131 05:50:55.571962 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571964 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-run-ovn\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571991 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-run-netns\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.571997 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85m75\" (UniqueName: \"kubernetes.io/projected/05292ea6-aa2b-48b8-adaa-91002b1d615c-kube-api-access-85m75\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572086 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-log-socket\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572104 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-slash\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572128 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/05292ea6-aa2b-48b8-adaa-91002b1d615c-ovn-node-metrics-cert\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572129 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-run-systemd\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572188 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-systemd-units\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572223 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-cni-bin\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572255 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-node-log\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572256 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572298 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-var-lib-openvswitch\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572323 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-kubelet\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572375 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-etc-openvswitch\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572394 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-cni-netd\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572418 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/05292ea6-aa2b-48b8-adaa-91002b1d615c-ovnkube-config\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572226 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-slash\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572471 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-run-openvswitch\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572446 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-var-lib-openvswitch\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572502 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-cni-netd\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572507 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-etc-openvswitch\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572475 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-node-log\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572298 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-cni-bin\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572449 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-run-openvswitch\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572628 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/05292ea6-aa2b-48b8-adaa-91002b1d615c-host-kubelet\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572680 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/05292ea6-aa2b-48b8-adaa-91002b1d615c-env-overrides\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572738 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xdhx\" (UniqueName: \"kubernetes.io/projected/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-kube-api-access-8xdhx\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 
05:50:55.572754 4712 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572768 4712 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f522e2e-c0c8-44a7-b834-ac367dba0c9c-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.572944 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/05292ea6-aa2b-48b8-adaa-91002b1d615c-ovnkube-script-lib\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.573151 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/05292ea6-aa2b-48b8-adaa-91002b1d615c-ovnkube-config\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.575872 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/05292ea6-aa2b-48b8-adaa-91002b1d615c-ovn-node-metrics-cert\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.581633 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-cq69c" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.611465 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85m75\" (UniqueName: \"kubernetes.io/projected/05292ea6-aa2b-48b8-adaa-91002b1d615c-kube-api-access-85m75\") pod \"ovnkube-node-cxw89\" (UID: \"05292ea6-aa2b-48b8-adaa-91002b1d615c\") " pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: I0131 05:50:55.763387 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:50:55 crc kubenswrapper[4712]: W0131 05:50:55.788631 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05292ea6_aa2b_48b8_adaa_91002b1d615c.slice/crio-22a939e4491fac23f6e832aca4cdcc0467d026e2bceddc9e5842ef7c6530d666 WatchSource:0}: Error finding container 22a939e4491fac23f6e832aca4cdcc0467d026e2bceddc9e5842ef7c6530d666: Status 404 returned error can't find the container with id 22a939e4491fac23f6e832aca4cdcc0467d026e2bceddc9e5842ef7c6530d666 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.073074 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/2.log" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.074353 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/1.log" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.074406 4712 generic.go:334] "Generic (PLEG): container finished" podID="f4943935-d884-4777-b679-bfabc7235a23" containerID="d90ab44100fbd461ca9ec5eb5b37bded0b506688933f0dd115ab5f7d8779ceab" exitCode=2 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.074482 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zbfp7" event={"ID":"f4943935-d884-4777-b679-bfabc7235a23","Type":"ContainerDied","Data":"d90ab44100fbd461ca9ec5eb5b37bded0b506688933f0dd115ab5f7d8779ceab"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.074529 4712 scope.go:117] "RemoveContainer" containerID="1e4a207c1d06cbbf408d8a4b402dd5269b1374606a2de61e53adbf158e3960c6" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.075004 4712 scope.go:117] "RemoveContainer" containerID="d90ab44100fbd461ca9ec5eb5b37bded0b506688933f0dd115ab5f7d8779ceab" Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.076663 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-zbfp7_openshift-multus(f4943935-d884-4777-b679-bfabc7235a23)\"" pod="openshift-multus/multus-zbfp7" podUID="f4943935-d884-4777-b679-bfabc7235a23" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.083066 4712 generic.go:334] "Generic (PLEG): container finished" podID="05292ea6-aa2b-48b8-adaa-91002b1d615c" containerID="2a74200f1455cbcc1d574d333ad86992f2bc5df446b93692a55e1205d8e64989" exitCode=0 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.083140 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerDied","Data":"2a74200f1455cbcc1d574d333ad86992f2bc5df446b93692a55e1205d8e64989"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.083183 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerStarted","Data":"22a939e4491fac23f6e832aca4cdcc0467d026e2bceddc9e5842ef7c6530d666"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.086769 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovnkube-controller/3.log" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.090908 
4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovn-acl-logging/0.log" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.092140 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6r6bn_2f522e2e-c0c8-44a7-b834-ac367dba0c9c/ovn-controller/0.log" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094327 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de" exitCode=0 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094367 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931" exitCode=0 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094383 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f" exitCode=0 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094396 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0" exitCode=0 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094406 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165" exitCode=0 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094391 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094478 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094497 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094512 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094528 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094540 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" 
event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094555 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094571 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094416 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328" exitCode=0 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094583 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094629 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60" exitCode=143 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094672 4712 generic.go:334] "Generic (PLEG): container finished" podID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" containerID="9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a" exitCode=143 Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094580 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094722 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094752 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094771 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094788 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094804 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094816 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094828 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094862 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094910 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094976 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.094989 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095001 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095014 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095025 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095080 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095095 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095106 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095206 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095229 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095252 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"} Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095265 4712 
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095265 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095282 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095296 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095307 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095319 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095334 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095346 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095358 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095370 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095392 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6r6bn" event={"ID":"2f522e2e-c0c8-44a7-b834-ac367dba0c9c","Type":"ContainerDied","Data":"f1154e1c1699bba78ba81f65dec62030476ca9f07eddbc32e2d2650acba2eed6"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095413 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095429 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095441 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095453 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095465 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095476 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095488 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095500 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095511 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.095523 4712 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"}
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.134265 4712 scope.go:117] "RemoveContainer" containerID="7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.171701 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6r6bn"]
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.177465 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6r6bn"]
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.187015 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.217296 4712 scope.go:117] "RemoveContainer" containerID="3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.240288 4712 scope.go:117] "RemoveContainer" containerID="262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.255402 4712 scope.go:117] "RemoveContainer" containerID="d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.276164 4712 scope.go:117] "RemoveContainer" containerID="c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.298384 4712 scope.go:117] "RemoveContainer" containerID="2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.313031 4712 scope.go:117] "RemoveContainer" containerID="4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.337952 4712 scope.go:117] "RemoveContainer" containerID="9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.370415 4712 scope.go:117] "RemoveContainer" containerID="b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.391249 4712 scope.go:117] "RemoveContainer" containerID="7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"
Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.391934 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de\": container with ID starting with 7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de not found: ID does not exist" containerID="7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.392001 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"} err="failed to get container status \"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de\": rpc error: code = NotFound desc = could not find container \"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de\": container with ID starting with 7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.392028 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"
Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.393030 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\": container with ID starting with ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8 not found: ID does not exist" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.393093 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"} err="failed to get container status \"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\": rpc error: code = NotFound desc = could not find container \"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\": container with ID starting with ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.393139 4712 scope.go:117] "RemoveContainer" containerID="3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"
Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.393671 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\": container with ID starting with 3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931 not found: ID does not exist" containerID="3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"
\"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\": container with ID starting with 3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.393971 4712 scope.go:117] "RemoveContainer" containerID="262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f" Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.395046 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\": container with ID starting with 262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f not found: ID does not exist" containerID="262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.395067 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"} err="failed to get container status \"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\": rpc error: code = NotFound desc = could not find container \"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\": container with ID starting with 262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.395083 4712 scope.go:117] "RemoveContainer" containerID="d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0" Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.395358 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\": container with ID starting with d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0 not found: ID does not exist" containerID="d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.395391 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"} err="failed to get container status \"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\": rpc error: code = NotFound desc = could not find container \"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\": container with ID starting with d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.395413 4712 scope.go:117] "RemoveContainer" containerID="c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165" Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.395654 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\": container with ID starting with c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165 not found: ID does not exist" containerID="c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.395681 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"} 
err="failed to get container status \"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\": rpc error: code = NotFound desc = could not find container \"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\": container with ID starting with c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.395711 4712 scope.go:117] "RemoveContainer" containerID="2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328" Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.395961 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\": container with ID starting with 2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328 not found: ID does not exist" containerID="2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.395998 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"} err="failed to get container status \"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\": rpc error: code = NotFound desc = could not find container \"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\": container with ID starting with 2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.396020 4712 scope.go:117] "RemoveContainer" containerID="4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60" Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.396456 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\": container with ID starting with 4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60 not found: ID does not exist" containerID="4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.396531 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"} err="failed to get container status \"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\": rpc error: code = NotFound desc = could not find container \"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\": container with ID starting with 4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.396577 4712 scope.go:117] "RemoveContainer" containerID="9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a" Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.396963 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\": container with ID starting with 9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a not found: ID does not exist" containerID="9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.396998 4712 
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.396998 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"} err="failed to get container status \"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\": rpc error: code = NotFound desc = could not find container \"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\": container with ID starting with 9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.397016 4712 scope.go:117] "RemoveContainer" containerID="b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"
Jan 31 05:50:56 crc kubenswrapper[4712]: E0131 05:50:56.397384 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\": container with ID starting with b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9 not found: ID does not exist" containerID="b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.397420 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"} err="failed to get container status \"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\": rpc error: code = NotFound desc = could not find container \"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\": container with ID starting with b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.397447 4712 scope.go:117] "RemoveContainer" containerID="7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.397755 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"} err="failed to get container status \"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de\": rpc error: code = NotFound desc = could not find container \"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de\": container with ID starting with 7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.397790 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.398198 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"} err="failed to get container status \"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\": rpc error: code = NotFound desc = could not find container \"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\": container with ID starting with ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.398222 4712 scope.go:117] "RemoveContainer" containerID="3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.398827 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"} err="failed to get container status \"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\": rpc error: code = NotFound desc = could not find container \"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\": container with ID starting with 3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.398862 4712 scope.go:117] "RemoveContainer" containerID="262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.399321 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"} err="failed to get container status \"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\": rpc error: code = NotFound desc = could not find container \"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\": container with ID starting with 262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.399343 4712 scope.go:117] "RemoveContainer" containerID="d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.399695 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"} err="failed to get container status \"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\": rpc error: code = NotFound desc = could not find container \"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\": container with ID starting with d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.399726 4712 scope.go:117] "RemoveContainer" containerID="c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.400023 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"} err="failed to get container status \"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\": rpc error: code = NotFound desc = could not find container \"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\": container with ID starting with c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.400048 4712 scope.go:117] "RemoveContainer" containerID="2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.400405 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"} err="failed to get container status \"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\": rpc error: code = NotFound desc = could not find container \"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\": container with ID starting with 2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.400432 4712 scope.go:117] "RemoveContainer" containerID="4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.400852 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"} err="failed to get container status \"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\": rpc error: code = NotFound desc = could not find container \"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\": container with ID starting with 4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.400950 4712 scope.go:117] "RemoveContainer" containerID="9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.401328 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"} err="failed to get container status \"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\": rpc error: code = NotFound desc = could not find container \"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\": container with ID starting with 9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.401348 4712 scope.go:117] "RemoveContainer" containerID="b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.401808 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"} err="failed to get container status \"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\": rpc error: code = NotFound desc = could not find container \"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\": container with ID starting with b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.401836 4712 scope.go:117] "RemoveContainer" containerID="7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.402260 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"} err="failed to get container status \"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de\": rpc error: code = NotFound desc = could not find container \"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de\": container with ID starting with 7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.402305 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"
containerID={"Type":"cri-o","ID":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"} err="failed to get container status \"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\": rpc error: code = NotFound desc = could not find container \"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\": container with ID starting with ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.403490 4712 scope.go:117] "RemoveContainer" containerID="3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.403831 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"} err="failed to get container status \"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\": rpc error: code = NotFound desc = could not find container \"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\": container with ID starting with 3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.403873 4712 scope.go:117] "RemoveContainer" containerID="262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.404153 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"} err="failed to get container status \"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\": rpc error: code = NotFound desc = could not find container \"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\": container with ID starting with 262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.404287 4712 scope.go:117] "RemoveContainer" containerID="d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.404629 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"} err="failed to get container status \"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\": rpc error: code = NotFound desc = could not find container \"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\": container with ID starting with d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.404662 4712 scope.go:117] "RemoveContainer" containerID="c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.404954 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"} err="failed to get container status \"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\": rpc error: code = NotFound desc = could not find container \"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\": container with ID starting with c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165 not found: ID does not exist" Jan 
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.404991 4712 scope.go:117] "RemoveContainer" containerID="2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.405368 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"} err="failed to get container status \"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\": rpc error: code = NotFound desc = could not find container \"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\": container with ID starting with 2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.405390 4712 scope.go:117] "RemoveContainer" containerID="4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.405642 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"} err="failed to get container status \"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\": rpc error: code = NotFound desc = could not find container \"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\": container with ID starting with 4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.405674 4712 scope.go:117] "RemoveContainer" containerID="9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.405899 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"} err="failed to get container status \"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\": rpc error: code = NotFound desc = could not find container \"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\": container with ID starting with 9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.405927 4712 scope.go:117] "RemoveContainer" containerID="b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.406236 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"} err="failed to get container status \"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\": rpc error: code = NotFound desc = could not find container \"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\": container with ID starting with b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9 not found: ID does not exist"
Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.406266 4712 scope.go:117] "RemoveContainer" containerID="7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de"
\"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de\": rpc error: code = NotFound desc = could not find container \"7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de\": container with ID starting with 7b0c280eb613878711526fc82f172055e06721b99e1ee963b57b70c2de10d7de not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.406562 4712 scope.go:117] "RemoveContainer" containerID="ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.406915 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8"} err="failed to get container status \"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\": rpc error: code = NotFound desc = could not find container \"ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8\": container with ID starting with ec6601eb847050b536d027760c1c2a60a7dfcd52e169b1b0906639e9a9aaabe8 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.406961 4712 scope.go:117] "RemoveContainer" containerID="3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.407578 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931"} err="failed to get container status \"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\": rpc error: code = NotFound desc = could not find container \"3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931\": container with ID starting with 3ed3902525e721cf2db8680b5973b31caec662ae683bdacffd76ecc23a2c8931 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.407627 4712 scope.go:117] "RemoveContainer" containerID="262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.407931 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f"} err="failed to get container status \"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\": rpc error: code = NotFound desc = could not find container \"262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f\": container with ID starting with 262eb809aa8ec73b2f9ec05e766db803e1459f6f0610016113e751cd2d7ffd3f not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.407955 4712 scope.go:117] "RemoveContainer" containerID="d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.408710 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0"} err="failed to get container status \"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\": rpc error: code = NotFound desc = could not find container \"d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0\": container with ID starting with d7f5d32cf3302a0227aa5f754229a6c661936a5aa9656bc73bb4b415031274c0 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.408743 4712 scope.go:117] "RemoveContainer" 
containerID="c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.409004 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165"} err="failed to get container status \"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\": rpc error: code = NotFound desc = could not find container \"c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165\": container with ID starting with c49757025e0a356179557610a6b907c85a26ba5756e3fda59324761e01eae165 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.409030 4712 scope.go:117] "RemoveContainer" containerID="2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.409457 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328"} err="failed to get container status \"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\": rpc error: code = NotFound desc = could not find container \"2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328\": container with ID starting with 2f85032416f3d6ec2860577ebd65963ceeea889e66fb93c4e091a52cb0720328 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.409500 4712 scope.go:117] "RemoveContainer" containerID="4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.409766 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60"} err="failed to get container status \"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\": rpc error: code = NotFound desc = could not find container \"4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60\": container with ID starting with 4ba4d3eb8a0a69533ff09dc8560cc2547157cd7e5a18a2da8e9019f670e30b60 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.409792 4712 scope.go:117] "RemoveContainer" containerID="9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.410077 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a"} err="failed to get container status \"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\": rpc error: code = NotFound desc = could not find container \"9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a\": container with ID starting with 9675f98a12cb3c2c23ebe516a0817ec1d6a85493a47c5ced5d0fe06cd094848a not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.410143 4712 scope.go:117] "RemoveContainer" containerID="b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.410478 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9"} err="failed to get container status \"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\": rpc error: code = NotFound desc = could not find 
container \"b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9\": container with ID starting with b5da115f6868f5fe664f62a2d472fc1ad92a460faf7641bcf7b76a7eb90461c9 not found: ID does not exist" Jan 31 05:50:56 crc kubenswrapper[4712]: I0131 05:50:56.517264 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f522e2e-c0c8-44a7-b834-ac367dba0c9c" path="/var/lib/kubelet/pods/2f522e2e-c0c8-44a7-b834-ac367dba0c9c/volumes" Jan 31 05:50:57 crc kubenswrapper[4712]: I0131 05:50:57.106127 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerStarted","Data":"b51010b6fa1f823139bc49faa6f983fcda6924ec3cc15807ba027f18e35c1ceb"} Jan 31 05:50:57 crc kubenswrapper[4712]: I0131 05:50:57.106590 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerStarted","Data":"e3a20673b4982f3870489dc47da5a5b24c77b5983884030e239ebb058134397b"} Jan 31 05:50:57 crc kubenswrapper[4712]: I0131 05:50:57.106607 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerStarted","Data":"69be533cae01952aa5b93be547635083cd5d02d581bbe1c9b97b6255f15e5691"} Jan 31 05:50:57 crc kubenswrapper[4712]: I0131 05:50:57.106619 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerStarted","Data":"6df50119cc9af0c4c214be7b3b320ac3c3e8b24d9d6da25939f09304cadc0357"} Jan 31 05:50:57 crc kubenswrapper[4712]: I0131 05:50:57.106631 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerStarted","Data":"7305f2aa623140b9a5605e3cfe080f7c8598b3661e12731aca3f6c8576861026"} Jan 31 05:50:57 crc kubenswrapper[4712]: I0131 05:50:57.106646 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerStarted","Data":"8c5395e759cfa685e0f4493796b540699a5a4996f91cddae05bc15dfb805f619"} Jan 31 05:50:57 crc kubenswrapper[4712]: I0131 05:50:57.109491 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/2.log" Jan 31 05:51:00 crc kubenswrapper[4712]: I0131 05:51:00.139264 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerStarted","Data":"5212dc8fb6334a3038f6380cb8816adb558431873fba871539f72b984a89a752"} Jan 31 05:51:02 crc kubenswrapper[4712]: I0131 05:51:02.159572 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" event={"ID":"05292ea6-aa2b-48b8-adaa-91002b1d615c","Type":"ContainerStarted","Data":"9e2dd35df862a2a844e3fd4975f4dc445b1f00820d0f3af6baa28c4a259dd4b7"} Jan 31 05:51:02 crc kubenswrapper[4712]: I0131 05:51:02.160380 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:51:02 crc kubenswrapper[4712]: I0131 05:51:02.160401 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:51:02 crc kubenswrapper[4712]: I0131 05:51:02.196093 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" podStartSLOduration=7.196061532 podStartE2EDuration="7.196061532s" podCreationTimestamp="2026-01-31 05:50:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:51:02.193395128 +0000 UTC m=+728.287276959" watchObservedRunningTime="2026-01-31 05:51:02.196061532 +0000 UTC m=+728.289943373" Jan 31 05:51:02 crc kubenswrapper[4712]: I0131 05:51:02.206822 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:51:03 crc kubenswrapper[4712]: I0131 05:51:03.165940 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:51:03 crc kubenswrapper[4712]: I0131 05:51:03.206252 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:51:07 crc kubenswrapper[4712]: I0131 05:51:07.505637 4712 scope.go:117] "RemoveContainer" containerID="d90ab44100fbd461ca9ec5eb5b37bded0b506688933f0dd115ab5f7d8779ceab" Jan 31 05:51:08 crc kubenswrapper[4712]: I0131 05:51:08.197495 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zbfp7_f4943935-d884-4777-b679-bfabc7235a23/kube-multus/2.log" Jan 31 05:51:08 crc kubenswrapper[4712]: I0131 05:51:08.198001 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zbfp7" event={"ID":"f4943935-d884-4777-b679-bfabc7235a23","Type":"ContainerStarted","Data":"98297439a08d6a1cbe0f1a46457924b7621281a10fe87c12a5a0c574eb0fb1b5"} Jan 31 05:51:25 crc kubenswrapper[4712]: I0131 05:51:25.791686 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cxw89" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.711599 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx"] Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.713672 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.715736 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.721973 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx"] Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.809591 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bszr\" (UniqueName: \"kubernetes.io/projected/dd700cd6-556a-4331-b411-5551352a6b8f-kube-api-access-7bszr\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.809659 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.809766 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.911082 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bszr\" (UniqueName: \"kubernetes.io/projected/dd700cd6-556a-4331-b411-5551352a6b8f-kube-api-access-7bszr\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.911145 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.911239 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.911730 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.912047 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:32 crc kubenswrapper[4712]: I0131 05:51:32.947133 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bszr\" (UniqueName: \"kubernetes.io/projected/dd700cd6-556a-4331-b411-5551352a6b8f-kube-api-access-7bszr\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:33 crc kubenswrapper[4712]: I0131 05:51:33.063108 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:33 crc kubenswrapper[4712]: I0131 05:51:33.316499 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx"] Jan 31 05:51:33 crc kubenswrapper[4712]: I0131 05:51:33.380941 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" event={"ID":"dd700cd6-556a-4331-b411-5551352a6b8f","Type":"ContainerStarted","Data":"1401a4255b151abf5cedc37d3b7346920775ec20760e4aa7e9bb2ab8ffbc9f81"} Jan 31 05:51:34 crc kubenswrapper[4712]: I0131 05:51:34.389340 4712 generic.go:334] "Generic (PLEG): container finished" podID="dd700cd6-556a-4331-b411-5551352a6b8f" containerID="26c3344d1766de0e230839c49a7af2d93133257d0173ec6e20b90d10f5b7e02f" exitCode=0 Jan 31 05:51:34 crc kubenswrapper[4712]: I0131 05:51:34.389636 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" event={"ID":"dd700cd6-556a-4331-b411-5551352a6b8f","Type":"ContainerDied","Data":"26c3344d1766de0e230839c49a7af2d93133257d0173ec6e20b90d10f5b7e02f"} Jan 31 05:51:36 crc kubenswrapper[4712]: I0131 05:51:36.405313 4712 generic.go:334] "Generic (PLEG): container finished" podID="dd700cd6-556a-4331-b411-5551352a6b8f" containerID="d210d161ef02a525da99ed508d4e28b9592a6b568ad4f58cef59cfbaaf385746" exitCode=0 Jan 31 05:51:36 crc kubenswrapper[4712]: I0131 05:51:36.405503 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" event={"ID":"dd700cd6-556a-4331-b411-5551352a6b8f","Type":"ContainerDied","Data":"d210d161ef02a525da99ed508d4e28b9592a6b568ad4f58cef59cfbaaf385746"} Jan 31 05:51:37 crc kubenswrapper[4712]: I0131 05:51:37.416376 4712 generic.go:334] "Generic (PLEG): container finished" podID="dd700cd6-556a-4331-b411-5551352a6b8f" containerID="e759ca8057c5df6f24e96f1032d849841d986fcbf3ceb4681e6077dd6aa9aefc" exitCode=0 Jan 31 05:51:37 crc kubenswrapper[4712]: I0131 
Jan 31 05:51:37 crc kubenswrapper[4712]: I0131 05:51:37.416426 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" event={"ID":"dd700cd6-556a-4331-b411-5551352a6b8f","Type":"ContainerDied","Data":"e759ca8057c5df6f24e96f1032d849841d986fcbf3ceb4681e6077dd6aa9aefc"}
Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.754065 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx"
Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.808704 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-bundle\") pod \"dd700cd6-556a-4331-b411-5551352a6b8f\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") "
Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.808898 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-util\") pod \"dd700cd6-556a-4331-b411-5551352a6b8f\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") "
Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.809045 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bszr\" (UniqueName: \"kubernetes.io/projected/dd700cd6-556a-4331-b411-5551352a6b8f-kube-api-access-7bszr\") pod \"dd700cd6-556a-4331-b411-5551352a6b8f\" (UID: \"dd700cd6-556a-4331-b411-5551352a6b8f\") "
Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.809271 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-bundle" (OuterVolumeSpecName: "bundle") pod "dd700cd6-556a-4331-b411-5551352a6b8f" (UID: "dd700cd6-556a-4331-b411-5551352a6b8f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.809840 4712 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-bundle\") on node \"crc\" DevicePath \"\""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.911077 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bszr\" (UniqueName: \"kubernetes.io/projected/dd700cd6-556a-4331-b411-5551352a6b8f-kube-api-access-7bszr\") on node \"crc\" DevicePath \"\"" Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.950286 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hjkf7"] Jan 31 05:51:38 crc kubenswrapper[4712]: E0131 05:51:38.950795 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd700cd6-556a-4331-b411-5551352a6b8f" containerName="extract" Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.950827 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd700cd6-556a-4331-b411-5551352a6b8f" containerName="extract" Jan 31 05:51:38 crc kubenswrapper[4712]: E0131 05:51:38.950856 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd700cd6-556a-4331-b411-5551352a6b8f" containerName="util" Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.950873 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd700cd6-556a-4331-b411-5551352a6b8f" containerName="util" Jan 31 05:51:38 crc kubenswrapper[4712]: E0131 05:51:38.950909 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd700cd6-556a-4331-b411-5551352a6b8f" containerName="pull" Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.950930 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd700cd6-556a-4331-b411-5551352a6b8f" containerName="pull" Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.951231 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd700cd6-556a-4331-b411-5551352a6b8f" containerName="extract" Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.953009 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:38 crc kubenswrapper[4712]: I0131 05:51:38.975001 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hjkf7"] Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.012596 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-utilities\") pod \"redhat-operators-hjkf7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.012692 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6jgg\" (UniqueName: \"kubernetes.io/projected/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-kube-api-access-x6jgg\") pod \"redhat-operators-hjkf7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.012788 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-catalog-content\") pod \"redhat-operators-hjkf7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.113606 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-utilities\") pod \"redhat-operators-hjkf7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.113701 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6jgg\" (UniqueName: \"kubernetes.io/projected/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-kube-api-access-x6jgg\") pod \"redhat-operators-hjkf7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.113747 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-catalog-content\") pod \"redhat-operators-hjkf7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.115376 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-utilities\") pod \"redhat-operators-hjkf7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.115092 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-catalog-content\") pod \"redhat-operators-hjkf7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.159508 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-x6jgg\" (UniqueName: \"kubernetes.io/projected/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-kube-api-access-x6jgg\") pod \"redhat-operators-hjkf7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.290808 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.445492 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" event={"ID":"dd700cd6-556a-4331-b411-5551352a6b8f","Type":"ContainerDied","Data":"1401a4255b151abf5cedc37d3b7346920775ec20760e4aa7e9bb2ab8ffbc9f81"} Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.445953 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1401a4255b151abf5cedc37d3b7346920775ec20760e4aa7e9bb2ab8ffbc9f81" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.446006 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.547991 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hjkf7"] Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.590411 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-util" (OuterVolumeSpecName: "util") pod "dd700cd6-556a-4331-b411-5551352a6b8f" (UID: "dd700cd6-556a-4331-b411-5551352a6b8f"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:51:39 crc kubenswrapper[4712]: I0131 05:51:39.620820 4712 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd700cd6-556a-4331-b411-5551352a6b8f-util\") on node \"crc\" DevicePath \"\"" Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.123017 4712 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.453151 4712 generic.go:334] "Generic (PLEG): container finished" podID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerID="2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d" exitCode=0 Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.453777 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hjkf7" event={"ID":"90a3a2c9-d5ac-430f-bb99-693fbaace4c7","Type":"ContainerDied","Data":"2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d"} Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.453816 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hjkf7" event={"ID":"90a3a2c9-d5ac-430f-bb99-693fbaace4c7","Type":"ContainerStarted","Data":"e58a0009e7012ded4b88b30fe1c379013761031ad5c77462f23cc569a93c57d5"} Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.713681 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-tp5m4"] Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.714331 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-tp5m4" Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.716511 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.716962 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-l5jvb" Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.717684 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.730409 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-tp5m4"] Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.839343 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxwwb\" (UniqueName: \"kubernetes.io/projected/9f81c039-533b-498a-8958-8b217806c189-kube-api-access-bxwwb\") pod \"nmstate-operator-646758c888-tp5m4\" (UID: \"9f81c039-533b-498a-8958-8b217806c189\") " pod="openshift-nmstate/nmstate-operator-646758c888-tp5m4" Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.941435 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxwwb\" (UniqueName: \"kubernetes.io/projected/9f81c039-533b-498a-8958-8b217806c189-kube-api-access-bxwwb\") pod \"nmstate-operator-646758c888-tp5m4\" (UID: \"9f81c039-533b-498a-8958-8b217806c189\") " pod="openshift-nmstate/nmstate-operator-646758c888-tp5m4" Jan 31 05:51:40 crc kubenswrapper[4712]: I0131 05:51:40.963007 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxwwb\" (UniqueName: \"kubernetes.io/projected/9f81c039-533b-498a-8958-8b217806c189-kube-api-access-bxwwb\") pod \"nmstate-operator-646758c888-tp5m4\" (UID: \"9f81c039-533b-498a-8958-8b217806c189\") " pod="openshift-nmstate/nmstate-operator-646758c888-tp5m4" Jan 31 05:51:41 crc kubenswrapper[4712]: I0131 05:51:41.029004 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-tp5m4" Jan 31 05:51:41 crc kubenswrapper[4712]: I0131 05:51:41.291361 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-tp5m4"] Jan 31 05:51:41 crc kubenswrapper[4712]: W0131 05:51:41.333124 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f81c039_533b_498a_8958_8b217806c189.slice/crio-19557a8c675cc806f1fc487f847b21edf16e52fe8edf57c9f2a1f224ccfb7bca WatchSource:0}: Error finding container 19557a8c675cc806f1fc487f847b21edf16e52fe8edf57c9f2a1f224ccfb7bca: Status 404 returned error can't find the container with id 19557a8c675cc806f1fc487f847b21edf16e52fe8edf57c9f2a1f224ccfb7bca Jan 31 05:51:41 crc kubenswrapper[4712]: I0131 05:51:41.462302 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-tp5m4" event={"ID":"9f81c039-533b-498a-8958-8b217806c189","Type":"ContainerStarted","Data":"19557a8c675cc806f1fc487f847b21edf16e52fe8edf57c9f2a1f224ccfb7bca"} Jan 31 05:51:42 crc kubenswrapper[4712]: I0131 05:51:42.473573 4712 generic.go:334] "Generic (PLEG): container finished" podID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerID="83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a" exitCode=0 Jan 31 05:51:42 crc kubenswrapper[4712]: I0131 05:51:42.473694 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hjkf7" event={"ID":"90a3a2c9-d5ac-430f-bb99-693fbaace4c7","Type":"ContainerDied","Data":"83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a"} Jan 31 05:51:42 crc kubenswrapper[4712]: I0131 05:51:42.497448 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:51:42 crc kubenswrapper[4712]: I0131 05:51:42.497546 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:51:44 crc kubenswrapper[4712]: I0131 05:51:44.497465 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hjkf7" event={"ID":"90a3a2c9-d5ac-430f-bb99-693fbaace4c7","Type":"ContainerStarted","Data":"9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d"} Jan 31 05:51:44 crc kubenswrapper[4712]: I0131 05:51:44.500751 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-tp5m4" event={"ID":"9f81c039-533b-498a-8958-8b217806c189","Type":"ContainerStarted","Data":"e3c66da41ea1788f25bf734a0d521aa3e47a255e715c97fd977e8fc3d7cc9033"} Jan 31 05:51:44 crc kubenswrapper[4712]: I0131 05:51:44.562214 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-tp5m4" podStartSLOduration=1.735772241 podStartE2EDuration="4.562164048s" podCreationTimestamp="2026-01-31 05:51:40 +0000 UTC" firstStartedPulling="2026-01-31 05:51:41.335191647 +0000 UTC m=+767.429073488" lastFinishedPulling="2026-01-31 05:51:44.161583444 +0000 UTC 
m=+770.255465295" observedRunningTime="2026-01-31 05:51:44.552805783 +0000 UTC m=+770.646687644" watchObservedRunningTime="2026-01-31 05:51:44.562164048 +0000 UTC m=+770.656045909" Jan 31 05:51:44 crc kubenswrapper[4712]: I0131 05:51:44.568594 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hjkf7" podStartSLOduration=3.07072925 podStartE2EDuration="6.568577412s" podCreationTimestamp="2026-01-31 05:51:38 +0000 UTC" firstStartedPulling="2026-01-31 05:51:40.455582065 +0000 UTC m=+766.549463916" lastFinishedPulling="2026-01-31 05:51:43.953430237 +0000 UTC m=+770.047312078" observedRunningTime="2026-01-31 05:51:44.52683009 +0000 UTC m=+770.620711951" watchObservedRunningTime="2026-01-31 05:51:44.568577412 +0000 UTC m=+770.662459263" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.565838 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-nnmp4"] Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.566983 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-nnmp4" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.571262 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-ntd8g" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.586646 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-nnmp4"] Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.590393 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75"] Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.591245 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.596565 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.599464 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-44b4c"] Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.600768 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.623776 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb6sg\" (UniqueName: \"kubernetes.io/projected/f5cd590c-d8eb-429e-8023-0a0b981d2437-kube-api-access-kb6sg\") pod \"nmstate-metrics-54757c584b-nnmp4\" (UID: \"f5cd590c-d8eb-429e-8023-0a0b981d2437\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-nnmp4" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.631118 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75"] Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.729676 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-dbus-socket\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.729747 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv82w\" (UniqueName: \"kubernetes.io/projected/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-kube-api-access-qv82w\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.729800 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb6sg\" (UniqueName: \"kubernetes.io/projected/f5cd590c-d8eb-429e-8023-0a0b981d2437-kube-api-access-kb6sg\") pod \"nmstate-metrics-54757c584b-nnmp4\" (UID: \"f5cd590c-d8eb-429e-8023-0a0b981d2437\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-nnmp4" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.729835 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/737942ad-fc1c-4559-a40a-e772801f3da4-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-ftd75\" (UID: \"737942ad-fc1c-4559-a40a-e772801f3da4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.729861 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-ovs-socket\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.729913 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vfnz\" (UniqueName: \"kubernetes.io/projected/737942ad-fc1c-4559-a40a-e772801f3da4-kube-api-access-5vfnz\") pod \"nmstate-webhook-8474b5b9d8-ftd75\" (UID: \"737942ad-fc1c-4559-a40a-e772801f3da4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.729944 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-nmstate-lock\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " 
pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.768897 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5"] Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.770257 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.771841 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-5tld8" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.771973 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb6sg\" (UniqueName: \"kubernetes.io/projected/f5cd590c-d8eb-429e-8023-0a0b981d2437-kube-api-access-kb6sg\") pod \"nmstate-metrics-54757c584b-nnmp4\" (UID: \"f5cd590c-d8eb-429e-8023-0a0b981d2437\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-nnmp4" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.772683 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.773473 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.776162 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5"] Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.834702 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.834794 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vfnz\" (UniqueName: \"kubernetes.io/projected/737942ad-fc1c-4559-a40a-e772801f3da4-kube-api-access-5vfnz\") pod \"nmstate-webhook-8474b5b9d8-ftd75\" (UID: \"737942ad-fc1c-4559-a40a-e772801f3da4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.834829 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-nmstate-lock\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.834887 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-dbus-socket\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.834909 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hds9r\" (UniqueName: \"kubernetes.io/projected/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-kube-api-access-hds9r\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " 
pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.834953 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv82w\" (UniqueName: \"kubernetes.io/projected/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-kube-api-access-qv82w\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.834999 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.835042 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/737942ad-fc1c-4559-a40a-e772801f3da4-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-ftd75\" (UID: \"737942ad-fc1c-4559-a40a-e772801f3da4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.835060 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-ovs-socket\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.835189 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-ovs-socket\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.835831 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-nmstate-lock\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.841154 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-dbus-socket\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.841973 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/737942ad-fc1c-4559-a40a-e772801f3da4-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-ftd75\" (UID: \"737942ad-fc1c-4559-a40a-e772801f3da4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.863750 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv82w\" (UniqueName: \"kubernetes.io/projected/f53db0c4-e85d-4db9-b1c6-12bb90a2d886-kube-api-access-qv82w\") pod \"nmstate-handler-44b4c\" (UID: \"f53db0c4-e85d-4db9-b1c6-12bb90a2d886\") " 
pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.869555 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vfnz\" (UniqueName: \"kubernetes.io/projected/737942ad-fc1c-4559-a40a-e772801f3da4-kube-api-access-5vfnz\") pod \"nmstate-webhook-8474b5b9d8-ftd75\" (UID: \"737942ad-fc1c-4559-a40a-e772801f3da4\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.887576 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-nnmp4" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.918619 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.933790 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.935864 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.935937 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hds9r\" (UniqueName: \"kubernetes.io/projected/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-kube-api-access-hds9r\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.935985 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:45 crc kubenswrapper[4712]: E0131 05:51:45.936150 4712 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Jan 31 05:51:45 crc kubenswrapper[4712]: E0131 05:51:45.936262 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-plugin-serving-cert podName:59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4 nodeName:}" failed. No retries permitted until 2026-01-31 05:51:46.436237958 +0000 UTC m=+772.530119799 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-plugin-serving-cert") pod "nmstate-console-plugin-7754f76f8b-pjjc5" (UID: "59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4") : secret "plugin-serving-cert" not found Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.937398 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.957731 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hds9r\" (UniqueName: \"kubernetes.io/projected/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-kube-api-access-hds9r\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.958557 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-8649cbcc7f-wjpkw"] Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.959619 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:45 crc kubenswrapper[4712]: I0131 05:51:45.984192 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-8649cbcc7f-wjpkw"] Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.037102 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-service-ca\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.037207 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-trusted-ca-bundle\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.037251 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-console-config\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.037292 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-console-serving-cert\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.037316 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr5nv\" (UniqueName: 
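
The plugin-serving-cert failure above shows the mount retry protocol: the secret does not exist yet (its serving-cert controller has not minted it), so the pending-operations tracker parks the SetUp and permits no retries until now+500ms, with the delay growing on each repeated failure; that is why the eventually successful SetUp at 05:51:46.45 happens on a later reconciler pass rather than in a tight loop. A sketch of that exponential backoff, assuming the 500ms initial delay visible in the log and a cap of roughly two minutes (the exact cap here is an assumption, not taken from this log):

package main

import (
	"fmt"
	"time"
)

// backoff models per-volume retry gating: a failed MountVolume.SetUp may not
// be retried before retryAfter, and the delay doubles on each failure.
type backoff struct {
	delay      time.Duration
	retryAfter time.Time
}

func (b *backoff) fail(now time.Time) {
	if b.delay == 0 {
		b.delay = 500 * time.Millisecond // durationBeforeRetry seen in the log
	} else {
		b.delay *= 2
		if cap := 2*time.Minute + 2*time.Second; b.delay > cap {
			b.delay = cap // assumed ceiling
		}
	}
	b.retryAfter = now.Add(b.delay)
}

func main() {
	var b backoff
	now := time.Now()
	for i := 1; i <= 4; i++ {
		b.fail(now)
		fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
			i, b.retryAfter.Format(time.RFC3339), b.delay)
		now = b.retryAfter
	}
}
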
\"kubernetes.io/projected/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-kube-api-access-dr5nv\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.037370 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-oauth-serving-cert\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.037387 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-console-oauth-config\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.141675 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-trusted-ca-bundle\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.141772 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-console-config\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.141832 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-console-serving-cert\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.141869 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr5nv\" (UniqueName: \"kubernetes.io/projected/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-kube-api-access-dr5nv\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.141895 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-oauth-serving-cert\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.141920 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-console-oauth-config\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.141965 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-service-ca\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.143398 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-service-ca\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.143443 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-console-config\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.143625 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-trusted-ca-bundle\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.144385 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-oauth-serving-cert\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.147477 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-console-serving-cert\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.147624 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-console-oauth-config\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.161088 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr5nv\" (UniqueName: \"kubernetes.io/projected/3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c-kube-api-access-dr5nv\") pod \"console-8649cbcc7f-wjpkw\" (UID: \"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c\") " pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.294632 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.417292 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75"] Jan 31 05:51:46 crc kubenswrapper[4712]: W0131 05:51:46.434214 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod737942ad_fc1c_4559_a40a_e772801f3da4.slice/crio-284e2e2bf3ab73c261e1ddc813a05dab0e39d55a666fc62b51416517de8aea79 WatchSource:0}: Error finding container 284e2e2bf3ab73c261e1ddc813a05dab0e39d55a666fc62b51416517de8aea79: Status 404 returned error can't find the container with id 284e2e2bf3ab73c261e1ddc813a05dab0e39d55a666fc62b51416517de8aea79 Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.447892 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.453410 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-pjjc5\" (UID: \"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.482897 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-nnmp4"] Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.515010 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-nnmp4" event={"ID":"f5cd590c-d8eb-429e-8023-0a0b981d2437","Type":"ContainerStarted","Data":"7d03ae733851d7fbb0bd19bafe23f394ca5ff52d1aa0af88ca57893465585ee6"} Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.517214 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" event={"ID":"737942ad-fc1c-4559-a40a-e772801f3da4","Type":"ContainerStarted","Data":"284e2e2bf3ab73c261e1ddc813a05dab0e39d55a666fc62b51416517de8aea79"} Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.520687 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-44b4c" event={"ID":"f53db0c4-e85d-4db9-b1c6-12bb90a2d886","Type":"ContainerStarted","Data":"14aae60fda33bc57c4cd2abbf82835752ddf0f68e8130a7e0af12ca07ae19c40"} Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.717323 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.797868 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-8649cbcc7f-wjpkw"] Jan 31 05:51:46 crc kubenswrapper[4712]: I0131 05:51:46.970888 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5"] Jan 31 05:51:46 crc kubenswrapper[4712]: W0131 05:51:46.978365 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59d0e8e3_1a37_42cd_a1b3_175f6a5fe8a4.slice/crio-077bf9af131f3b4a68cb47eef6696af7311552c1e1a212f2ff83b46bb430447c WatchSource:0}: Error finding container 077bf9af131f3b4a68cb47eef6696af7311552c1e1a212f2ff83b46bb430447c: Status 404 returned error can't find the container with id 077bf9af131f3b4a68cb47eef6696af7311552c1e1a212f2ff83b46bb430447c Jan 31 05:51:47 crc kubenswrapper[4712]: I0131 05:51:47.530973 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" event={"ID":"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4","Type":"ContainerStarted","Data":"077bf9af131f3b4a68cb47eef6696af7311552c1e1a212f2ff83b46bb430447c"} Jan 31 05:51:47 crc kubenswrapper[4712]: I0131 05:51:47.532879 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-8649cbcc7f-wjpkw" event={"ID":"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c","Type":"ContainerStarted","Data":"6bf827cefba9ffa1ca0d0e7eb8e0b71cb79bcaaa496e4307a47e0212fbe4dc4d"} Jan 31 05:51:49 crc kubenswrapper[4712]: I0131 05:51:49.291542 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:49 crc kubenswrapper[4712]: I0131 05:51:49.291636 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:49 crc kubenswrapper[4712]: I0131 05:51:49.548414 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-8649cbcc7f-wjpkw" event={"ID":"3f25fcd6-b8e1-4c3f-b1d9-73fc617c881c","Type":"ContainerStarted","Data":"7779996ef30decc5fd9daa74a5b43f59fceb8bd47e8e3ed8cdea6d8f768adf66"} Jan 31 05:51:49 crc kubenswrapper[4712]: I0131 05:51:49.585267 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-8649cbcc7f-wjpkw" podStartSLOduration=4.585232877 podStartE2EDuration="4.585232877s" podCreationTimestamp="2026-01-31 05:51:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:51:49.585232947 +0000 UTC m=+775.679114808" watchObservedRunningTime="2026-01-31 05:51:49.585232877 +0000 UTC m=+775.679114728" Jan 31 05:51:50 crc kubenswrapper[4712]: I0131 05:51:50.347572 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hjkf7" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerName="registry-server" probeResult="failure" output=< Jan 31 05:51:50 crc kubenswrapper[4712]: timeout: failed to connect service ":50051" within 1s Jan 31 05:51:50 crc kubenswrapper[4712]: > Jan 31 05:51:53 crc kubenswrapper[4712]: I0131 05:51:53.583323 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" 
event={"ID":"737942ad-fc1c-4559-a40a-e772801f3da4","Type":"ContainerStarted","Data":"4c33a86102f931349b8b96b8c5b5a8d579b0e287c7f97c3db8ece8d652a2fc91"} Jan 31 05:51:53 crc kubenswrapper[4712]: I0131 05:51:53.584305 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:51:53 crc kubenswrapper[4712]: I0131 05:51:53.585774 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" event={"ID":"59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4","Type":"ContainerStarted","Data":"ba31199bdfba6ea31420d0c8c1b7db9b55c9b13b68161cc328f3edc8b54a96d0"} Jan 31 05:51:53 crc kubenswrapper[4712]: I0131 05:51:53.587689 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-nnmp4" event={"ID":"f5cd590c-d8eb-429e-8023-0a0b981d2437","Type":"ContainerStarted","Data":"bf508883f8707ffa4dd311434aa1754c16e589ffd0e46cd598d006aa4c64c3e5"} Jan 31 05:51:53 crc kubenswrapper[4712]: I0131 05:51:53.607903 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" podStartSLOduration=1.744237027 podStartE2EDuration="8.607862174s" podCreationTimestamp="2026-01-31 05:51:45 +0000 UTC" firstStartedPulling="2026-01-31 05:51:46.437103828 +0000 UTC m=+772.530985679" lastFinishedPulling="2026-01-31 05:51:53.300728945 +0000 UTC m=+779.394610826" observedRunningTime="2026-01-31 05:51:53.599255287 +0000 UTC m=+779.693137148" watchObservedRunningTime="2026-01-31 05:51:53.607862174 +0000 UTC m=+779.701744015" Jan 31 05:51:54 crc kubenswrapper[4712]: I0131 05:51:54.538579 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-pjjc5" podStartSLOduration=3.261741919 podStartE2EDuration="9.538549071s" podCreationTimestamp="2026-01-31 05:51:45 +0000 UTC" firstStartedPulling="2026-01-31 05:51:46.98299528 +0000 UTC m=+773.076877121" lastFinishedPulling="2026-01-31 05:51:53.259802422 +0000 UTC m=+779.353684273" observedRunningTime="2026-01-31 05:51:53.62660625 +0000 UTC m=+779.720488091" watchObservedRunningTime="2026-01-31 05:51:54.538549071 +0000 UTC m=+780.632430922" Jan 31 05:51:54 crc kubenswrapper[4712]: I0131 05:51:54.595928 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-44b4c" event={"ID":"f53db0c4-e85d-4db9-b1c6-12bb90a2d886","Type":"ContainerStarted","Data":"992492e4e8cc9b28e4f1e506bb9b225bd48424ee579dac56c565e2b72b6e5499"} Jan 31 05:51:54 crc kubenswrapper[4712]: I0131 05:51:54.618499 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-44b4c" podStartSLOduration=2.3475073 podStartE2EDuration="9.618475749s" podCreationTimestamp="2026-01-31 05:51:45 +0000 UTC" firstStartedPulling="2026-01-31 05:51:45.995630973 +0000 UTC m=+772.089512814" lastFinishedPulling="2026-01-31 05:51:53.266599422 +0000 UTC m=+779.360481263" observedRunningTime="2026-01-31 05:51:54.613968295 +0000 UTC m=+780.707850156" watchObservedRunningTime="2026-01-31 05:51:54.618475749 +0000 UTC m=+780.712357590" Jan 31 05:51:55 crc kubenswrapper[4712]: I0131 05:51:55.602387 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:51:56 crc kubenswrapper[4712]: I0131 05:51:56.295268 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:56 crc kubenswrapper[4712]: I0131 05:51:56.295374 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:56 crc kubenswrapper[4712]: I0131 05:51:56.304658 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:56 crc kubenswrapper[4712]: I0131 05:51:56.612634 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-nnmp4" event={"ID":"f5cd590c-d8eb-429e-8023-0a0b981d2437","Type":"ContainerStarted","Data":"6b680064aeb364fa872c180dd986688e43059a27f5f6e30a2f6f3ce9a17345db"} Jan 31 05:51:56 crc kubenswrapper[4712]: I0131 05:51:56.627808 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-8649cbcc7f-wjpkw" Jan 31 05:51:56 crc kubenswrapper[4712]: I0131 05:51:56.649307 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-nnmp4" podStartSLOduration=1.834973894 podStartE2EDuration="11.649262804s" podCreationTimestamp="2026-01-31 05:51:45 +0000 UTC" firstStartedPulling="2026-01-31 05:51:46.500937561 +0000 UTC m=+772.594819402" lastFinishedPulling="2026-01-31 05:51:56.315226441 +0000 UTC m=+782.409108312" observedRunningTime="2026-01-31 05:51:56.640142806 +0000 UTC m=+782.734024697" watchObservedRunningTime="2026-01-31 05:51:56.649262804 +0000 UTC m=+782.743144695" Jan 31 05:51:56 crc kubenswrapper[4712]: I0131 05:51:56.724125 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8qb8j"] Jan 31 05:51:59 crc kubenswrapper[4712]: I0131 05:51:59.361607 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:59 crc kubenswrapper[4712]: I0131 05:51:59.437828 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:51:59 crc kubenswrapper[4712]: I0131 05:51:59.616356 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hjkf7"] Jan 31 05:52:00 crc kubenswrapper[4712]: I0131 05:52:00.642836 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hjkf7" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerName="registry-server" containerID="cri-o://9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d" gracePeriod=2 Jan 31 05:52:00 crc kubenswrapper[4712]: I0131 05:52:00.968969 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-44b4c" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.042365 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.112825 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-catalog-content\") pod \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.112904 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-utilities\") pod \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.112931 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6jgg\" (UniqueName: \"kubernetes.io/projected/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-kube-api-access-x6jgg\") pod \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\" (UID: \"90a3a2c9-d5ac-430f-bb99-693fbaace4c7\") " Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.114078 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-utilities" (OuterVolumeSpecName: "utilities") pod "90a3a2c9-d5ac-430f-bb99-693fbaace4c7" (UID: "90a3a2c9-d5ac-430f-bb99-693fbaace4c7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.121902 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-kube-api-access-x6jgg" (OuterVolumeSpecName: "kube-api-access-x6jgg") pod "90a3a2c9-d5ac-430f-bb99-693fbaace4c7" (UID: "90a3a2c9-d5ac-430f-bb99-693fbaace4c7"). InnerVolumeSpecName "kube-api-access-x6jgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.215339 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.215720 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6jgg\" (UniqueName: \"kubernetes.io/projected/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-kube-api-access-x6jgg\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.272732 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "90a3a2c9-d5ac-430f-bb99-693fbaace4c7" (UID: "90a3a2c9-d5ac-430f-bb99-693fbaace4c7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.317516 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90a3a2c9-d5ac-430f-bb99-693fbaace4c7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.656718 4712 generic.go:334] "Generic (PLEG): container finished" podID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerID="9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d" exitCode=0 Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.656830 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hjkf7" event={"ID":"90a3a2c9-d5ac-430f-bb99-693fbaace4c7","Type":"ContainerDied","Data":"9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d"} Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.656894 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hjkf7" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.657997 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hjkf7" event={"ID":"90a3a2c9-d5ac-430f-bb99-693fbaace4c7","Type":"ContainerDied","Data":"e58a0009e7012ded4b88b30fe1c379013761031ad5c77462f23cc569a93c57d5"} Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.658126 4712 scope.go:117] "RemoveContainer" containerID="9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.697061 4712 scope.go:117] "RemoveContainer" containerID="83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.735255 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hjkf7"] Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.738996 4712 scope.go:117] "RemoveContainer" containerID="2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.749708 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hjkf7"] Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.769356 4712 scope.go:117] "RemoveContainer" containerID="9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d" Jan 31 05:52:01 crc kubenswrapper[4712]: E0131 05:52:01.770356 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d\": container with ID starting with 9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d not found: ID does not exist" containerID="9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.770441 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d"} err="failed to get container status \"9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d\": rpc error: code = NotFound desc = could not find container \"9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d\": container with ID starting with 9b92d514495bee06a2bc741eef59e006953deb349b172c020f4f65607771c42d not found: ID does not exist" Jan 31 05:52:01 crc 
kubenswrapper[4712]: I0131 05:52:01.770492 4712 scope.go:117] "RemoveContainer" containerID="83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a" Jan 31 05:52:01 crc kubenswrapper[4712]: E0131 05:52:01.770982 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a\": container with ID starting with 83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a not found: ID does not exist" containerID="83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.771053 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a"} err="failed to get container status \"83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a\": rpc error: code = NotFound desc = could not find container \"83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a\": container with ID starting with 83ea93e188aaa7d6462a58cfb40bd75db0045a5c3e0453f0d7991bc8860d488a not found: ID does not exist" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.771086 4712 scope.go:117] "RemoveContainer" containerID="2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d" Jan 31 05:52:01 crc kubenswrapper[4712]: E0131 05:52:01.771639 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d\": container with ID starting with 2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d not found: ID does not exist" containerID="2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d" Jan 31 05:52:01 crc kubenswrapper[4712]: I0131 05:52:01.771691 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d"} err="failed to get container status \"2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d\": rpc error: code = NotFound desc = could not find container \"2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d\": container with ID starting with 2273b00ca08b351356b5c1c46f2b6b2d3fe7e066899aac9861af4289470da20d not found: ID does not exist" Jan 31 05:52:02 crc kubenswrapper[4712]: I0131 05:52:02.513023 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" path="/var/lib/kubelet/pods/90a3a2c9-d5ac-430f-bb99-693fbaace4c7/volumes" Jan 31 05:52:05 crc kubenswrapper[4712]: I0131 05:52:05.926237 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-ftd75" Jan 31 05:52:12 crc kubenswrapper[4712]: I0131 05:52:12.500301 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:52:12 crc kubenswrapper[4712]: I0131 05:52:12.500872 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.123921 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq"] Jan 31 05:52:20 crc kubenswrapper[4712]: E0131 05:52:20.125204 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerName="extract-utilities" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.125226 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerName="extract-utilities" Jan 31 05:52:20 crc kubenswrapper[4712]: E0131 05:52:20.125258 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerName="extract-content" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.125272 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerName="extract-content" Jan 31 05:52:20 crc kubenswrapper[4712]: E0131 05:52:20.125296 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerName="registry-server" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.125312 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerName="registry-server" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.125544 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="90a3a2c9-d5ac-430f-bb99-693fbaace4c7" containerName="registry-server" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.127034 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.131335 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.136651 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq"] Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.220692 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr8h4\" (UniqueName: \"kubernetes.io/projected/ffe754a1-dc64-42e6-a072-d949b564a821-kube-api-access-qr8h4\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.220802 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.220965 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.323084 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.323194 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.323246 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr8h4\" (UniqueName: \"kubernetes.io/projected/ffe754a1-dc64-42e6-a072-d949b564a821-kube-api-access-qr8h4\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.323844 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.323896 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.360081 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr8h4\" (UniqueName: \"kubernetes.io/projected/ffe754a1-dc64-42e6-a072-d949b564a821-kube-api-access-qr8h4\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.454999 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.744947 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq"] Jan 31 05:52:20 crc kubenswrapper[4712]: I0131 05:52:20.798878 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" event={"ID":"ffe754a1-dc64-42e6-a072-d949b564a821","Type":"ContainerStarted","Data":"a0ad43313f5f1b6e145ef6f5eb1cd06c504d6cb7daae92c99600b114b27c22f4"} Jan 31 05:52:21 crc kubenswrapper[4712]: I0131 05:52:21.796758 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-8qb8j" podUID="b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" containerName="console" containerID="cri-o://7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e" gracePeriod=15 Jan 31 05:52:21 crc kubenswrapper[4712]: I0131 05:52:21.808647 4712 generic.go:334] "Generic (PLEG): container finished" podID="ffe754a1-dc64-42e6-a072-d949b564a821" containerID="5122d0ec3cc813d62159a8b5d4f476ab5caf44fea5baa7c6f69ec60ea6a5c2b7" exitCode=0 Jan 31 05:52:21 crc kubenswrapper[4712]: I0131 05:52:21.808725 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" event={"ID":"ffe754a1-dc64-42e6-a072-d949b564a821","Type":"ContainerDied","Data":"5122d0ec3cc813d62159a8b5d4f476ab5caf44fea5baa7c6f69ec60ea6a5c2b7"} Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.169112 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8qb8j_b2fa2327-5a62-4a7f-89e1-c5c7d7922f14/console/0.log" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.169535 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.253690 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-oauth-serving-cert\") pod \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.253741 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-serving-cert\") pod \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.253816 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-config\") pod \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.253847 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-trusted-ca-bundle\") pod \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.253873 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-service-ca\") pod \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.253934 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6cc9\" (UniqueName: \"kubernetes.io/projected/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-kube-api-access-f6cc9\") pod \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.253960 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-oauth-config\") pod \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\" (UID: \"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14\") " Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.254665 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" (UID: "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.254846 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" (UID: "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.254891 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-config" (OuterVolumeSpecName: "console-config") pod "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" (UID: "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.255008 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-service-ca" (OuterVolumeSpecName: "service-ca") pod "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" (UID: "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.261839 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" (UID: "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.261890 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-kube-api-access-f6cc9" (OuterVolumeSpecName: "kube-api-access-f6cc9") pod "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" (UID: "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14"). InnerVolumeSpecName "kube-api-access-f6cc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.261971 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" (UID: "b2fa2327-5a62-4a7f-89e1-c5c7d7922f14"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.356038 4712 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.356081 4712 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.356093 4712 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-service-ca\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.356105 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6cc9\" (UniqueName: \"kubernetes.io/projected/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-kube-api-access-f6cc9\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.356122 4712 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.356134 4712 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.356146 4712 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.820305 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8qb8j_b2fa2327-5a62-4a7f-89e1-c5c7d7922f14/console/0.log" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.820379 4712 generic.go:334] "Generic (PLEG): container finished" podID="b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" containerID="7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e" exitCode=2 Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.820425 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8qb8j" event={"ID":"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14","Type":"ContainerDied","Data":"7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e"} Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.820468 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8qb8j" event={"ID":"b2fa2327-5a62-4a7f-89e1-c5c7d7922f14","Type":"ContainerDied","Data":"4fc992ab96d350ac3cc725c6187280b2c9689e5adf99d518bfc83b4665540cef"} Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.820493 4712 scope.go:117] "RemoveContainer" containerID="7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.820496 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8qb8j" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.855301 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8qb8j"] Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.859205 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-8qb8j"] Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.860548 4712 scope.go:117] "RemoveContainer" containerID="7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e" Jan 31 05:52:22 crc kubenswrapper[4712]: E0131 05:52:22.861196 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e\": container with ID starting with 7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e not found: ID does not exist" containerID="7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e" Jan 31 05:52:22 crc kubenswrapper[4712]: I0131 05:52:22.861228 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e"} err="failed to get container status \"7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e\": rpc error: code = NotFound desc = could not find container \"7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e\": container with ID starting with 7859c81a998d6cf769ce30e676f9aba8f8d09aa2c9390fd084b22883362d090e not found: ID does not exist" Jan 31 05:52:24 crc kubenswrapper[4712]: I0131 05:52:24.515938 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" path="/var/lib/kubelet/pods/b2fa2327-5a62-4a7f-89e1-c5c7d7922f14/volumes" Jan 31 05:52:24 crc kubenswrapper[4712]: I0131 05:52:24.842379 4712 generic.go:334] "Generic (PLEG): container finished" podID="ffe754a1-dc64-42e6-a072-d949b564a821" containerID="2004ae100a143808ed027fc664912ac3e8a49a052e63fc2f57ee767cd1d92175" exitCode=0 Jan 31 05:52:24 crc kubenswrapper[4712]: I0131 05:52:24.842602 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" event={"ID":"ffe754a1-dc64-42e6-a072-d949b564a821","Type":"ContainerDied","Data":"2004ae100a143808ed027fc664912ac3e8a49a052e63fc2f57ee767cd1d92175"} Jan 31 05:52:25 crc kubenswrapper[4712]: I0131 05:52:25.857579 4712 generic.go:334] "Generic (PLEG): container finished" podID="ffe754a1-dc64-42e6-a072-d949b564a821" containerID="afaa45aceee78291dbda6366476e3749444c9790316f5d099edf3f94ec03c1c1" exitCode=0 Jan 31 05:52:25 crc kubenswrapper[4712]: I0131 05:52:25.857901 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" event={"ID":"ffe754a1-dc64-42e6-a072-d949b564a821","Type":"ContainerDied","Data":"afaa45aceee78291dbda6366476e3749444c9790316f5d099edf3f94ec03c1c1"} Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.127478 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.239537 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-bundle\") pod \"ffe754a1-dc64-42e6-a072-d949b564a821\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.240079 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qr8h4\" (UniqueName: \"kubernetes.io/projected/ffe754a1-dc64-42e6-a072-d949b564a821-kube-api-access-qr8h4\") pod \"ffe754a1-dc64-42e6-a072-d949b564a821\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.240453 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-util\") pod \"ffe754a1-dc64-42e6-a072-d949b564a821\" (UID: \"ffe754a1-dc64-42e6-a072-d949b564a821\") " Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.240590 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-bundle" (OuterVolumeSpecName: "bundle") pod "ffe754a1-dc64-42e6-a072-d949b564a821" (UID: "ffe754a1-dc64-42e6-a072-d949b564a821"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.241240 4712 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.251282 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-util" (OuterVolumeSpecName: "util") pod "ffe754a1-dc64-42e6-a072-d949b564a821" (UID: "ffe754a1-dc64-42e6-a072-d949b564a821"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.255796 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffe754a1-dc64-42e6-a072-d949b564a821-kube-api-access-qr8h4" (OuterVolumeSpecName: "kube-api-access-qr8h4") pod "ffe754a1-dc64-42e6-a072-d949b564a821" (UID: "ffe754a1-dc64-42e6-a072-d949b564a821"). InnerVolumeSpecName "kube-api-access-qr8h4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.342137 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qr8h4\" (UniqueName: \"kubernetes.io/projected/ffe754a1-dc64-42e6-a072-d949b564a821-kube-api-access-qr8h4\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.342192 4712 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ffe754a1-dc64-42e6-a072-d949b564a821-util\") on node \"crc\" DevicePath \"\"" Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.881153 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" event={"ID":"ffe754a1-dc64-42e6-a072-d949b564a821","Type":"ContainerDied","Data":"a0ad43313f5f1b6e145ef6f5eb1cd06c504d6cb7daae92c99600b114b27c22f4"} Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.881274 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0ad43313f5f1b6e145ef6f5eb1cd06c504d6cb7daae92c99600b114b27c22f4" Jan 31 05:52:27 crc kubenswrapper[4712]: I0131 05:52:27.881277 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.296997 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw"] Jan 31 05:52:38 crc kubenswrapper[4712]: E0131 05:52:38.297745 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffe754a1-dc64-42e6-a072-d949b564a821" containerName="util" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.297759 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffe754a1-dc64-42e6-a072-d949b564a821" containerName="util" Jan 31 05:52:38 crc kubenswrapper[4712]: E0131 05:52:38.297776 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffe754a1-dc64-42e6-a072-d949b564a821" containerName="pull" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.297782 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffe754a1-dc64-42e6-a072-d949b564a821" containerName="pull" Jan 31 05:52:38 crc kubenswrapper[4712]: E0131 05:52:38.297792 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffe754a1-dc64-42e6-a072-d949b564a821" containerName="extract" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.297798 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffe754a1-dc64-42e6-a072-d949b564a821" containerName="extract" Jan 31 05:52:38 crc kubenswrapper[4712]: E0131 05:52:38.297809 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" containerName="console" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.297814 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" containerName="console" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.297919 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2fa2327-5a62-4a7f-89e1-c5c7d7922f14" containerName="console" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.297938 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffe754a1-dc64-42e6-a072-d949b564a821" containerName="extract" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.298380 
4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.303353 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.303735 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.304981 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-pg97p" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.305686 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.305773 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.324948 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw"] Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.448425 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/28a2f231-88ed-4f0d-941b-aa351dcabfd8-webhook-cert\") pod \"metallb-operator-controller-manager-f5c7f4d69-nvgqw\" (UID: \"28a2f231-88ed-4f0d-941b-aa351dcabfd8\") " pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.448495 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gktn\" (UniqueName: \"kubernetes.io/projected/28a2f231-88ed-4f0d-941b-aa351dcabfd8-kube-api-access-2gktn\") pod \"metallb-operator-controller-manager-f5c7f4d69-nvgqw\" (UID: \"28a2f231-88ed-4f0d-941b-aa351dcabfd8\") " pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.448518 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/28a2f231-88ed-4f0d-941b-aa351dcabfd8-apiservice-cert\") pod \"metallb-operator-controller-manager-f5c7f4d69-nvgqw\" (UID: \"28a2f231-88ed-4f0d-941b-aa351dcabfd8\") " pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.550320 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gktn\" (UniqueName: \"kubernetes.io/projected/28a2f231-88ed-4f0d-941b-aa351dcabfd8-kube-api-access-2gktn\") pod \"metallb-operator-controller-manager-f5c7f4d69-nvgqw\" (UID: \"28a2f231-88ed-4f0d-941b-aa351dcabfd8\") " pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.550375 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/28a2f231-88ed-4f0d-941b-aa351dcabfd8-apiservice-cert\") pod \"metallb-operator-controller-manager-f5c7f4d69-nvgqw\" (UID: \"28a2f231-88ed-4f0d-941b-aa351dcabfd8\") " pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc 
kubenswrapper[4712]: I0131 05:52:38.550489 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/28a2f231-88ed-4f0d-941b-aa351dcabfd8-webhook-cert\") pod \"metallb-operator-controller-manager-f5c7f4d69-nvgqw\" (UID: \"28a2f231-88ed-4f0d-941b-aa351dcabfd8\") " pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.555042 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv"] Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.555830 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.558672 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/28a2f231-88ed-4f0d-941b-aa351dcabfd8-webhook-cert\") pod \"metallb-operator-controller-manager-f5c7f4d69-nvgqw\" (UID: \"28a2f231-88ed-4f0d-941b-aa351dcabfd8\") " pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.560715 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/28a2f231-88ed-4f0d-941b-aa351dcabfd8-apiservice-cert\") pod \"metallb-operator-controller-manager-f5c7f4d69-nvgqw\" (UID: \"28a2f231-88ed-4f0d-941b-aa351dcabfd8\") " pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.561411 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.561624 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.568788 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-kxpq2" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.574170 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv"] Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.583282 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gktn\" (UniqueName: \"kubernetes.io/projected/28a2f231-88ed-4f0d-941b-aa351dcabfd8-kube-api-access-2gktn\") pod \"metallb-operator-controller-manager-f5c7f4d69-nvgqw\" (UID: \"28a2f231-88ed-4f0d-941b-aa351dcabfd8\") " pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.618108 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.651485 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0237140f-4dc3-42f6-8621-f96e8732af5e-apiservice-cert\") pod \"metallb-operator-webhook-server-6d4f9bc9b4-stlsv\" (UID: \"0237140f-4dc3-42f6-8621-f96e8732af5e\") " pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.651543 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0237140f-4dc3-42f6-8621-f96e8732af5e-webhook-cert\") pod \"metallb-operator-webhook-server-6d4f9bc9b4-stlsv\" (UID: \"0237140f-4dc3-42f6-8621-f96e8732af5e\") " pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.651573 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h9xb\" (UniqueName: \"kubernetes.io/projected/0237140f-4dc3-42f6-8621-f96e8732af5e-kube-api-access-5h9xb\") pod \"metallb-operator-webhook-server-6d4f9bc9b4-stlsv\" (UID: \"0237140f-4dc3-42f6-8621-f96e8732af5e\") " pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.757256 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0237140f-4dc3-42f6-8621-f96e8732af5e-apiservice-cert\") pod \"metallb-operator-webhook-server-6d4f9bc9b4-stlsv\" (UID: \"0237140f-4dc3-42f6-8621-f96e8732af5e\") " pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.757804 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0237140f-4dc3-42f6-8621-f96e8732af5e-webhook-cert\") pod \"metallb-operator-webhook-server-6d4f9bc9b4-stlsv\" (UID: \"0237140f-4dc3-42f6-8621-f96e8732af5e\") " pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.757836 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h9xb\" (UniqueName: \"kubernetes.io/projected/0237140f-4dc3-42f6-8621-f96e8732af5e-kube-api-access-5h9xb\") pod \"metallb-operator-webhook-server-6d4f9bc9b4-stlsv\" (UID: \"0237140f-4dc3-42f6-8621-f96e8732af5e\") " pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.774899 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0237140f-4dc3-42f6-8621-f96e8732af5e-apiservice-cert\") pod \"metallb-operator-webhook-server-6d4f9bc9b4-stlsv\" (UID: \"0237140f-4dc3-42f6-8621-f96e8732af5e\") " pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.775434 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0237140f-4dc3-42f6-8621-f96e8732af5e-webhook-cert\") pod \"metallb-operator-webhook-server-6d4f9bc9b4-stlsv\" (UID: \"0237140f-4dc3-42f6-8621-f96e8732af5e\") " 
pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.799984 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h9xb\" (UniqueName: \"kubernetes.io/projected/0237140f-4dc3-42f6-8621-f96e8732af5e-kube-api-access-5h9xb\") pod \"metallb-operator-webhook-server-6d4f9bc9b4-stlsv\" (UID: \"0237140f-4dc3-42f6-8621-f96e8732af5e\") " pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.970519 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw"] Jan 31 05:52:38 crc kubenswrapper[4712]: I0131 05:52:38.979913 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:39 crc kubenswrapper[4712]: I0131 05:52:39.255795 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv"] Jan 31 05:52:39 crc kubenswrapper[4712]: W0131 05:52:39.264152 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0237140f_4dc3_42f6_8621_f96e8732af5e.slice/crio-57a0de74380955c1166f8254fac8593106d805f1818df8f949e385120c171e12 WatchSource:0}: Error finding container 57a0de74380955c1166f8254fac8593106d805f1818df8f949e385120c171e12: Status 404 returned error can't find the container with id 57a0de74380955c1166f8254fac8593106d805f1818df8f949e385120c171e12 Jan 31 05:52:39 crc kubenswrapper[4712]: I0131 05:52:39.960880 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" event={"ID":"28a2f231-88ed-4f0d-941b-aa351dcabfd8","Type":"ContainerStarted","Data":"4f8ab421fb1c9328b8ee67bbb89fb422a2d61feaa42966941f6965e4e9b9db0c"} Jan 31 05:52:39 crc kubenswrapper[4712]: I0131 05:52:39.961981 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" event={"ID":"0237140f-4dc3-42f6-8621-f96e8732af5e","Type":"ContainerStarted","Data":"57a0de74380955c1166f8254fac8593106d805f1818df8f949e385120c171e12"} Jan 31 05:52:42 crc kubenswrapper[4712]: I0131 05:52:42.497077 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:52:42 crc kubenswrapper[4712]: I0131 05:52:42.497198 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:52:42 crc kubenswrapper[4712]: I0131 05:52:42.497276 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:52:42 crc kubenswrapper[4712]: I0131 05:52:42.498809 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6c31bbfd8c8125c0094ca4ca6d9f21a2e8a425cef3c9f9b1a3d36d5e6b54a6c8"} 
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 05:52:42 crc kubenswrapper[4712]: I0131 05:52:42.498944 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://6c31bbfd8c8125c0094ca4ca6d9f21a2e8a425cef3c9f9b1a3d36d5e6b54a6c8" gracePeriod=600 Jan 31 05:52:42 crc kubenswrapper[4712]: I0131 05:52:42.990411 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="6c31bbfd8c8125c0094ca4ca6d9f21a2e8a425cef3c9f9b1a3d36d5e6b54a6c8" exitCode=0 Jan 31 05:52:42 crc kubenswrapper[4712]: I0131 05:52:42.990483 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"6c31bbfd8c8125c0094ca4ca6d9f21a2e8a425cef3c9f9b1a3d36d5e6b54a6c8"} Jan 31 05:52:42 crc kubenswrapper[4712]: I0131 05:52:42.990797 4712 scope.go:117] "RemoveContainer" containerID="b3a7c99a912faf460ca649d69822abd612ccb0330382bcc11abe1bfe6578ac2d" Jan 31 05:52:45 crc kubenswrapper[4712]: I0131 05:52:45.004122 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" event={"ID":"28a2f231-88ed-4f0d-941b-aa351dcabfd8","Type":"ContainerStarted","Data":"f1ced4948dd093149b1021894410794910f63506645620db2f893f8d277e52ba"} Jan 31 05:52:45 crc kubenswrapper[4712]: I0131 05:52:45.005081 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:52:45 crc kubenswrapper[4712]: I0131 05:52:45.006088 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" event={"ID":"0237140f-4dc3-42f6-8621-f96e8732af5e","Type":"ContainerStarted","Data":"6ba1223e9f29cf5efd561025f4065f50b44655c610fdfbe46f5ef03ebdb60e98"} Jan 31 05:52:45 crc kubenswrapper[4712]: I0131 05:52:45.006216 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:52:45 crc kubenswrapper[4712]: I0131 05:52:45.009049 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"d072cbc66487ddb6d9c89bfc7420c017cf30064480bcc1f0a5508bf27bbaeb59"} Jan 31 05:52:45 crc kubenswrapper[4712]: I0131 05:52:45.033580 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" podStartSLOduration=1.766010391 podStartE2EDuration="7.033533422s" podCreationTimestamp="2026-01-31 05:52:38 +0000 UTC" firstStartedPulling="2026-01-31 05:52:38.985434546 +0000 UTC m=+825.079316387" lastFinishedPulling="2026-01-31 05:52:44.252957577 +0000 UTC m=+830.346839418" observedRunningTime="2026-01-31 05:52:45.028678981 +0000 UTC m=+831.122560852" watchObservedRunningTime="2026-01-31 05:52:45.033533422 +0000 UTC m=+831.127415263" Jan 31 05:52:45 crc kubenswrapper[4712]: I0131 05:52:45.093344 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" podStartSLOduration=2.102682628 podStartE2EDuration="7.093324014s" podCreationTimestamp="2026-01-31 05:52:38 +0000 UTC" firstStartedPulling="2026-01-31 05:52:39.269881088 +0000 UTC m=+825.363762929" lastFinishedPulling="2026-01-31 05:52:44.260522474 +0000 UTC m=+830.354404315" observedRunningTime="2026-01-31 05:52:45.084545933 +0000 UTC m=+831.178427794" watchObservedRunningTime="2026-01-31 05:52:45.093324014 +0000 UTC m=+831.187205855" Jan 31 05:52:58 crc kubenswrapper[4712]: I0131 05:52:58.987228 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6d4f9bc9b4-stlsv" Jan 31 05:53:18 crc kubenswrapper[4712]: I0131 05:53:18.622333 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-f5c7f4d69-nvgqw" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.512534 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg"] Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.513495 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.516316 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-8t8xr" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.516824 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.527647 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-7xh8p"] Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.533352 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.535774 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.537044 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.539683 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg"] Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.577066 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7276\" (UniqueName: \"kubernetes.io/projected/cb060e1c-3daf-49df-9e19-da31fc5b719c-kube-api-access-b7276\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.577137 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-reloader\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.577182 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqtlk\" (UniqueName: \"kubernetes.io/projected/4171e791-18bd-4302-933e-e49a8ad12e63-kube-api-access-kqtlk\") pod \"frr-k8s-webhook-server-7df86c4f6c-79qrg\" (UID: \"4171e791-18bd-4302-933e-e49a8ad12e63\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.577214 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-frr-sockets\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.577241 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-frr-conf\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.577268 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb060e1c-3daf-49df-9e19-da31fc5b719c-metrics-certs\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.577341 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-metrics\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.577431 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: 
\"kubernetes.io/configmap/cb060e1c-3daf-49df-9e19-da31fc5b719c-frr-startup\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.577477 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4171e791-18bd-4302-933e-e49a8ad12e63-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-79qrg\" (UID: \"4171e791-18bd-4302-933e-e49a8ad12e63\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.636627 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-j9fwd"] Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.637781 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-j9fwd" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.643919 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.644116 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-6mwdr" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.644251 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.644347 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.645303 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-pwnsk"] Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.646730 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.656883 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.657976 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-pwnsk"] Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680322 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cb060e1c-3daf-49df-9e19-da31fc5b719c-frr-startup\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680379 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4171e791-18bd-4302-933e-e49a8ad12e63-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-79qrg\" (UID: \"4171e791-18bd-4302-933e-e49a8ad12e63\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680411 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/693f3580-e3b0-4892-a4be-1be046ccd732-cert\") pod \"controller-6968d8fdc4-pwnsk\" (UID: \"693f3580-e3b0-4892-a4be-1be046ccd732\") " pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680433 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7276\" (UniqueName: \"kubernetes.io/projected/cb060e1c-3daf-49df-9e19-da31fc5b719c-kube-api-access-b7276\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680453 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/693f3580-e3b0-4892-a4be-1be046ccd732-metrics-certs\") pod \"controller-6968d8fdc4-pwnsk\" (UID: \"693f3580-e3b0-4892-a4be-1be046ccd732\") " pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680474 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/5a9228a1-741f-49a7-8e70-5f2079f89755-metallb-excludel2\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680494 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-reloader\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680512 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttb8w\" (UniqueName: \"kubernetes.io/projected/693f3580-e3b0-4892-a4be-1be046ccd732-kube-api-access-ttb8w\") pod \"controller-6968d8fdc4-pwnsk\" (UID: \"693f3580-e3b0-4892-a4be-1be046ccd732\") " pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 
05:53:19.680534 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqtlk\" (UniqueName: \"kubernetes.io/projected/4171e791-18bd-4302-933e-e49a8ad12e63-kube-api-access-kqtlk\") pod \"frr-k8s-webhook-server-7df86c4f6c-79qrg\" (UID: \"4171e791-18bd-4302-933e-e49a8ad12e63\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680555 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-frr-sockets\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680575 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-frr-conf\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680598 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb060e1c-3daf-49df-9e19-da31fc5b719c-metrics-certs\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680623 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpc78\" (UniqueName: \"kubernetes.io/projected/5a9228a1-741f-49a7-8e70-5f2079f89755-kube-api-access-fpc78\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680650 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-metrics\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680679 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-memberlist\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.680703 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-metrics-certs\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.681701 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cb060e1c-3daf-49df-9e19-da31fc5b719c-frr-startup\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.683472 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-reloader\") pod 
\"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.683780 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-metrics\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.683992 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-frr-sockets\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.684197 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cb060e1c-3daf-49df-9e19-da31fc5b719c-frr-conf\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.687004 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb060e1c-3daf-49df-9e19-da31fc5b719c-metrics-certs\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.687463 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4171e791-18bd-4302-933e-e49a8ad12e63-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-79qrg\" (UID: \"4171e791-18bd-4302-933e-e49a8ad12e63\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.708438 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7276\" (UniqueName: \"kubernetes.io/projected/cb060e1c-3daf-49df-9e19-da31fc5b719c-kube-api-access-b7276\") pod \"frr-k8s-7xh8p\" (UID: \"cb060e1c-3daf-49df-9e19-da31fc5b719c\") " pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.727844 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqtlk\" (UniqueName: \"kubernetes.io/projected/4171e791-18bd-4302-933e-e49a8ad12e63-kube-api-access-kqtlk\") pod \"frr-k8s-webhook-server-7df86c4f6c-79qrg\" (UID: \"4171e791-18bd-4302-933e-e49a8ad12e63\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.782303 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/693f3580-e3b0-4892-a4be-1be046ccd732-cert\") pod \"controller-6968d8fdc4-pwnsk\" (UID: \"693f3580-e3b0-4892-a4be-1be046ccd732\") " pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.782351 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/693f3580-e3b0-4892-a4be-1be046ccd732-metrics-certs\") pod \"controller-6968d8fdc4-pwnsk\" (UID: \"693f3580-e3b0-4892-a4be-1be046ccd732\") " pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.782373 4712 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/5a9228a1-741f-49a7-8e70-5f2079f89755-metallb-excludel2\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.782395 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttb8w\" (UniqueName: \"kubernetes.io/projected/693f3580-e3b0-4892-a4be-1be046ccd732-kube-api-access-ttb8w\") pod \"controller-6968d8fdc4-pwnsk\" (UID: \"693f3580-e3b0-4892-a4be-1be046ccd732\") " pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.782426 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpc78\" (UniqueName: \"kubernetes.io/projected/5a9228a1-741f-49a7-8e70-5f2079f89755-kube-api-access-fpc78\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.782465 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-memberlist\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.782488 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-metrics-certs\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:19 crc kubenswrapper[4712]: E0131 05:53:19.782619 4712 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Jan 31 05:53:19 crc kubenswrapper[4712]: E0131 05:53:19.782671 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-metrics-certs podName:5a9228a1-741f-49a7-8e70-5f2079f89755 nodeName:}" failed. No retries permitted until 2026-01-31 05:53:20.282652129 +0000 UTC m=+866.376533970 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-metrics-certs") pod "speaker-j9fwd" (UID: "5a9228a1-741f-49a7-8e70-5f2079f89755") : secret "speaker-certs-secret" not found Jan 31 05:53:19 crc kubenswrapper[4712]: E0131 05:53:19.783729 4712 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 31 05:53:19 crc kubenswrapper[4712]: E0131 05:53:19.783874 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-memberlist podName:5a9228a1-741f-49a7-8e70-5f2079f89755 nodeName:}" failed. No retries permitted until 2026-01-31 05:53:20.283841238 +0000 UTC m=+866.377723079 (durationBeforeRetry 500ms). 
Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.783979 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/5a9228a1-741f-49a7-8e70-5f2079f89755-metallb-excludel2\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd"
Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.785543 4712 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.786819 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/693f3580-e3b0-4892-a4be-1be046ccd732-metrics-certs\") pod \"controller-6968d8fdc4-pwnsk\" (UID: \"693f3580-e3b0-4892-a4be-1be046ccd732\") " pod="metallb-system/controller-6968d8fdc4-pwnsk"
Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.803746 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/693f3580-e3b0-4892-a4be-1be046ccd732-cert\") pod \"controller-6968d8fdc4-pwnsk\" (UID: \"693f3580-e3b0-4892-a4be-1be046ccd732\") " pod="metallb-system/controller-6968d8fdc4-pwnsk"
Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.816059 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpc78\" (UniqueName: \"kubernetes.io/projected/5a9228a1-741f-49a7-8e70-5f2079f89755-kube-api-access-fpc78\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd"
Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.825982 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttb8w\" (UniqueName: \"kubernetes.io/projected/693f3580-e3b0-4892-a4be-1be046ccd732-kube-api-access-ttb8w\") pod \"controller-6968d8fdc4-pwnsk\" (UID: \"693f3580-e3b0-4892-a4be-1be046ccd732\") " pod="metallb-system/controller-6968d8fdc4-pwnsk"
Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.843572 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg"
Jan 31 05:53:19 crc kubenswrapper[4712]: I0131 05:53:19.878320 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-7xh8p"
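The two MountVolume.SetUp failures above are transient: the speaker pod references secrets ("speaker-certs-secret", "metallb-memberlist") that have not been created yet, so nestedpendingoperations blocks retries for 500ms. When the memberlist secret is still missing on the next pass (05:53:20.290378 below), the delay doubles to 1s, and the mount finally succeeds at 05:53:21 once the secret exists. A minimal sketch of that doubling retry gate, assuming a simple cap (the kubelet's actual parameters may differ):

// Each consecutive failure of the same operation doubles the wait
// before the next attempt: 500ms, 1s, 2s, ... up to an assumed cap.
package main

import (
	"fmt"
	"time"
)

type retryGate struct {
	delay time.Duration // current backoff; zero until the first failure
	max   time.Duration
}

// fail records a failure and returns how long retries are forbidden.
func (g *retryGate) fail() time.Duration {
	if g.delay == 0 {
		g.delay = 500 * time.Millisecond
	} else if g.delay *= 2; g.delay > g.max {
		g.delay = g.max
	}
	return g.delay
}

func main() {
	g := &retryGate{max: 2 * time.Minute}
	for attempt := 1; attempt <= 3; attempt++ {
		fmt.Printf("attempt %d failed; no retries permitted for %v\n", attempt, g.fail())
	}
	// attempt 1 failed; no retries permitted for 500ms
	// attempt 2 failed; no retries permitted for 1s
	// attempt 3 failed; no retries permitted for 2s
}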
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:20 crc kubenswrapper[4712]: I0131 05:53:20.192611 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-pwnsk"] Jan 31 05:53:20 crc kubenswrapper[4712]: W0131 05:53:20.195006 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod693f3580_e3b0_4892_a4be_1be046ccd732.slice/crio-14775b120a7be1704b9390f41bfd29c765433d1cdc92bb7637921f75a7c6a7d5 WatchSource:0}: Error finding container 14775b120a7be1704b9390f41bfd29c765433d1cdc92bb7637921f75a7c6a7d5: Status 404 returned error can't find the container with id 14775b120a7be1704b9390f41bfd29c765433d1cdc92bb7637921f75a7c6a7d5 Jan 31 05:53:20 crc kubenswrapper[4712]: I0131 05:53:20.257400 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg"] Jan 31 05:53:20 crc kubenswrapper[4712]: W0131 05:53:20.265551 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4171e791_18bd_4302_933e_e49a8ad12e63.slice/crio-4e937d2e2c198cec4204998ecb5f9205c324a280f24ef035222bcf3775e1d415 WatchSource:0}: Error finding container 4e937d2e2c198cec4204998ecb5f9205c324a280f24ef035222bcf3775e1d415: Status 404 returned error can't find the container with id 4e937d2e2c198cec4204998ecb5f9205c324a280f24ef035222bcf3775e1d415 Jan 31 05:53:20 crc kubenswrapper[4712]: I0131 05:53:20.270575 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-pwnsk" event={"ID":"693f3580-e3b0-4892-a4be-1be046ccd732","Type":"ContainerStarted","Data":"14775b120a7be1704b9390f41bfd29c765433d1cdc92bb7637921f75a7c6a7d5"} Jan 31 05:53:20 crc kubenswrapper[4712]: I0131 05:53:20.274818 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" event={"ID":"4171e791-18bd-4302-933e-e49a8ad12e63","Type":"ContainerStarted","Data":"4e937d2e2c198cec4204998ecb5f9205c324a280f24ef035222bcf3775e1d415"} Jan 31 05:53:20 crc kubenswrapper[4712]: I0131 05:53:20.275725 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerStarted","Data":"334bfb483dbc64884399ba40a3e6ee1c33623441aff3dff43de8ad3cfa1ad89b"} Jan 31 05:53:20 crc kubenswrapper[4712]: I0131 05:53:20.290095 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-memberlist\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:20 crc kubenswrapper[4712]: I0131 05:53:20.290150 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-metrics-certs\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:20 crc kubenswrapper[4712]: E0131 05:53:20.290302 4712 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 31 05:53:20 crc kubenswrapper[4712]: E0131 05:53:20.290378 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-memberlist 
podName:5a9228a1-741f-49a7-8e70-5f2079f89755 nodeName:}" failed. No retries permitted until 2026-01-31 05:53:21.29035521 +0000 UTC m=+867.384237051 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-memberlist") pod "speaker-j9fwd" (UID: "5a9228a1-741f-49a7-8e70-5f2079f89755") : secret "metallb-memberlist" not found Jan 31 05:53:20 crc kubenswrapper[4712]: I0131 05:53:20.297401 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-metrics-certs\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:21 crc kubenswrapper[4712]: I0131 05:53:21.286841 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-pwnsk" event={"ID":"693f3580-e3b0-4892-a4be-1be046ccd732","Type":"ContainerStarted","Data":"467a4007201ca6e9f1e328ffd671704ef78460ad4bdd9747a87ff8983bc2d3b6"} Jan 31 05:53:21 crc kubenswrapper[4712]: I0131 05:53:21.287367 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:21 crc kubenswrapper[4712]: I0131 05:53:21.287432 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-pwnsk" event={"ID":"693f3580-e3b0-4892-a4be-1be046ccd732","Type":"ContainerStarted","Data":"cca75a29c40e0c4911d0ef7c750883ad93bdaf1486f1187fb732bd5a85a1d3d7"} Jan 31 05:53:21 crc kubenswrapper[4712]: I0131 05:53:21.308634 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-memberlist\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:21 crc kubenswrapper[4712]: I0131 05:53:21.313718 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5a9228a1-741f-49a7-8e70-5f2079f89755-memberlist\") pod \"speaker-j9fwd\" (UID: \"5a9228a1-741f-49a7-8e70-5f2079f89755\") " pod="metallb-system/speaker-j9fwd" Jan 31 05:53:21 crc kubenswrapper[4712]: I0131 05:53:21.458776 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-j9fwd" Jan 31 05:53:22 crc kubenswrapper[4712]: I0131 05:53:22.299545 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-j9fwd" event={"ID":"5a9228a1-741f-49a7-8e70-5f2079f89755","Type":"ContainerStarted","Data":"4ab206cb1c322a7b7ef39c84478f9e709fd7b022910720bccf580a6832357702"} Jan 31 05:53:22 crc kubenswrapper[4712]: I0131 05:53:22.299962 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-j9fwd" event={"ID":"5a9228a1-741f-49a7-8e70-5f2079f89755","Type":"ContainerStarted","Data":"263011eeb70b84143a9d6d56cc340753f139772e9e5f5eaeb4589f8f7feee532"} Jan 31 05:53:23 crc kubenswrapper[4712]: I0131 05:53:23.313138 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-j9fwd" event={"ID":"5a9228a1-741f-49a7-8e70-5f2079f89755","Type":"ContainerStarted","Data":"356746f68cc28530a95c367aead4d2e03e4ccefe32f42d7bca326aa52a99bbd2"} Jan 31 05:53:23 crc kubenswrapper[4712]: I0131 05:53:23.313346 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-j9fwd" Jan 31 05:53:23 crc kubenswrapper[4712]: I0131 05:53:23.338485 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-j9fwd" podStartSLOduration=4.338468088 podStartE2EDuration="4.338468088s" podCreationTimestamp="2026-01-31 05:53:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:53:23.337269429 +0000 UTC m=+869.431151270" watchObservedRunningTime="2026-01-31 05:53:23.338468088 +0000 UTC m=+869.432349929" Jan 31 05:53:23 crc kubenswrapper[4712]: I0131 05:53:23.339045 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-pwnsk" podStartSLOduration=4.339041091 podStartE2EDuration="4.339041091s" podCreationTimestamp="2026-01-31 05:53:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:53:21.325779667 +0000 UTC m=+867.419661508" watchObservedRunningTime="2026-01-31 05:53:23.339041091 +0000 UTC m=+869.432922932" Jan 31 05:53:28 crc kubenswrapper[4712]: I0131 05:53:28.357065 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" event={"ID":"4171e791-18bd-4302-933e-e49a8ad12e63","Type":"ContainerStarted","Data":"df8669c80d9977de185766bd1e9f54d16a4b009fabc371ca270f1532ae0d6f53"} Jan 31 05:53:28 crc kubenswrapper[4712]: I0131 05:53:28.360441 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" Jan 31 05:53:28 crc kubenswrapper[4712]: I0131 05:53:28.360615 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerDied","Data":"c4c08184480b7cfcca90dfbbce73339df918c185d53ed805c5cb2d505e157e8f"} Jan 31 05:53:28 crc kubenswrapper[4712]: I0131 05:53:28.360540 4712 generic.go:334] "Generic (PLEG): container finished" podID="cb060e1c-3daf-49df-9e19-da31fc5b719c" containerID="c4c08184480b7cfcca90dfbbce73339df918c185d53ed805c5cb2d505e157e8f" exitCode=0 Jan 31 05:53:28 crc kubenswrapper[4712]: I0131 05:53:28.438910 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" 
podStartSLOduration=1.7963026389999999 podStartE2EDuration="9.438875169s" podCreationTimestamp="2026-01-31 05:53:19 +0000 UTC" firstStartedPulling="2026-01-31 05:53:20.267593708 +0000 UTC m=+866.361475549" lastFinishedPulling="2026-01-31 05:53:27.910166238 +0000 UTC m=+874.004048079" observedRunningTime="2026-01-31 05:53:28.386611454 +0000 UTC m=+874.480493335" watchObservedRunningTime="2026-01-31 05:53:28.438875169 +0000 UTC m=+874.532757050"
Jan 31 05:53:29 crc kubenswrapper[4712]: I0131 05:53:29.373790 4712 generic.go:334] "Generic (PLEG): container finished" podID="cb060e1c-3daf-49df-9e19-da31fc5b719c" containerID="f96dc83f037cf85475175233d376ead7a33cbca9208690a48b1e841749534c61" exitCode=0
Jan 31 05:53:29 crc kubenswrapper[4712]: I0131 05:53:29.373963 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerDied","Data":"f96dc83f037cf85475175233d376ead7a33cbca9208690a48b1e841749534c61"}
Jan 31 05:53:30 crc kubenswrapper[4712]: I0131 05:53:30.385044 4712 generic.go:334] "Generic (PLEG): container finished" podID="cb060e1c-3daf-49df-9e19-da31fc5b719c" containerID="8e32d77af4e7bc52cccda19a11d2a0b4184de6a7dbcf443b89e12ef2ce018870" exitCode=0
Jan 31 05:53:30 crc kubenswrapper[4712]: I0131 05:53:30.385140 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerDied","Data":"8e32d77af4e7bc52cccda19a11d2a0b4184de6a7dbcf443b89e12ef2ce018870"}
Jan 31 05:53:31 crc kubenswrapper[4712]: I0131 05:53:31.397018 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerStarted","Data":"a07f598eef5cf6a1b731e3aedb82d0d27945905f763303a5705251454982d9cf"}
Jan 31 05:53:31 crc kubenswrapper[4712]: I0131 05:53:31.397492 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerStarted","Data":"34f613b598c6bf84c914652788b90b372afe8581500d364b45a41ed80150e98b"}
Jan 31 05:53:31 crc kubenswrapper[4712]: I0131 05:53:31.397506 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerStarted","Data":"f3daf2ed69254468988c78f146e59aa267ce51bed51764ad88bf802e02bb9913"}
Jan 31 05:53:31 crc kubenswrapper[4712]: I0131 05:53:31.397515 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerStarted","Data":"d209c74a7ff6aee7ea01a4b87739a0866365eab457c25a311e4015fba8d95da0"}
Jan 31 05:53:31 crc kubenswrapper[4712]: I0131 05:53:31.463767 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-j9fwd"
Jan 31 05:53:32 crc kubenswrapper[4712]: I0131 05:53:32.413375 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerStarted","Data":"e25b2ad886041c56295cec9becfaf5f75ff470aff817ec0f9467c51c943fee3a"}
Jan 31 05:53:32 crc kubenswrapper[4712]: I0131 05:53:32.413911 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-7xh8p"
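The "Observed pod startup duration" entries above and below fit a simple relationship: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling), so time spent pulling images does not count against the startup SLO. Recomputing the frr-k8s-webhook-server figures as a check (wall-clock arithmetic; the tracker itself works from the monotonic m=+ offsets, so trailing digits can differ slightly):

// Recompute podStartE2EDuration and podStartSLOduration for
// frr-k8s-webhook-server-7df86c4f6c-79qrg from the timestamps logged above.
package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2026-01-31 05:53:19 +0000 UTC")
	firstPull := mustParse("2026-01-31 05:53:20.267593708 +0000 UTC")
	lastPull := mustParse("2026-01-31 05:53:27.910166238 +0000 UTC")
	observed := mustParse("2026-01-31 05:53:28.438875169 +0000 UTC")

	e2e := observed.Sub(created)         // podStartE2EDuration: 9.438875169s
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: ~1.796302639s
	fmt.Println("E2E:", e2e, "SLO:", slo)
}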
pod="metallb-system/frr-k8s-7xh8p" event={"ID":"cb060e1c-3daf-49df-9e19-da31fc5b719c","Type":"ContainerStarted","Data":"6fe5f41ad4a8e6075b028476f0eb0af69a12384549e36f90e3e3e212a0ea8693"} Jan 31 05:53:32 crc kubenswrapper[4712]: I0131 05:53:32.444583 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-7xh8p" podStartSLOduration=5.711813612 podStartE2EDuration="13.444551729s" podCreationTimestamp="2026-01-31 05:53:19 +0000 UTC" firstStartedPulling="2026-01-31 05:53:20.201675538 +0000 UTC m=+866.295557379" lastFinishedPulling="2026-01-31 05:53:27.934413645 +0000 UTC m=+874.028295496" observedRunningTime="2026-01-31 05:53:32.441993599 +0000 UTC m=+878.535875480" watchObservedRunningTime="2026-01-31 05:53:32.444551729 +0000 UTC m=+878.538433570" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.607815 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-scc2x"] Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.609529 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-scc2x" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.612976 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.614334 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-w8t7z" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.616106 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.625575 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-scc2x"] Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.744649 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-648tr\" (UniqueName: \"kubernetes.io/projected/3c00c25b-d94e-4d57-b604-00937fba85ba-kube-api-access-648tr\") pod \"openstack-operator-index-scc2x\" (UID: \"3c00c25b-d94e-4d57-b604-00937fba85ba\") " pod="openstack-operators/openstack-operator-index-scc2x" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.857489 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-648tr\" (UniqueName: \"kubernetes.io/projected/3c00c25b-d94e-4d57-b604-00937fba85ba-kube-api-access-648tr\") pod \"openstack-operator-index-scc2x\" (UID: \"3c00c25b-d94e-4d57-b604-00937fba85ba\") " pod="openstack-operators/openstack-operator-index-scc2x" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.878891 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.898797 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-648tr\" (UniqueName: \"kubernetes.io/projected/3c00c25b-d94e-4d57-b604-00937fba85ba-kube-api-access-648tr\") pod \"openstack-operator-index-scc2x\" (UID: \"3c00c25b-d94e-4d57-b604-00937fba85ba\") " pod="openstack-operators/openstack-operator-index-scc2x" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 05:53:34.930656 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:34 crc kubenswrapper[4712]: I0131 
05:53:34.939893 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-scc2x" Jan 31 05:53:35 crc kubenswrapper[4712]: I0131 05:53:35.382670 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-scc2x"] Jan 31 05:53:35 crc kubenswrapper[4712]: I0131 05:53:35.439781 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-scc2x" event={"ID":"3c00c25b-d94e-4d57-b604-00937fba85ba","Type":"ContainerStarted","Data":"f76911fbbf925c89d749935c54f72b68daaebd8dfb15ecdd2866b91d669227f5"} Jan 31 05:53:37 crc kubenswrapper[4712]: I0131 05:53:37.780499 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-scc2x"] Jan 31 05:53:38 crc kubenswrapper[4712]: I0131 05:53:38.387381 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-zzwgm"] Jan 31 05:53:38 crc kubenswrapper[4712]: I0131 05:53:38.389034 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-zzwgm" Jan 31 05:53:38 crc kubenswrapper[4712]: I0131 05:53:38.413507 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zzwgm"] Jan 31 05:53:38 crc kubenswrapper[4712]: I0131 05:53:38.539382 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zvff\" (UniqueName: \"kubernetes.io/projected/f74d2a07-f376-41b5-b5c3-9305ad3a03fb-kube-api-access-2zvff\") pod \"openstack-operator-index-zzwgm\" (UID: \"f74d2a07-f376-41b5-b5c3-9305ad3a03fb\") " pod="openstack-operators/openstack-operator-index-zzwgm" Jan 31 05:53:38 crc kubenswrapper[4712]: I0131 05:53:38.641915 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zvff\" (UniqueName: \"kubernetes.io/projected/f74d2a07-f376-41b5-b5c3-9305ad3a03fb-kube-api-access-2zvff\") pod \"openstack-operator-index-zzwgm\" (UID: \"f74d2a07-f376-41b5-b5c3-9305ad3a03fb\") " pod="openstack-operators/openstack-operator-index-zzwgm" Jan 31 05:53:38 crc kubenswrapper[4712]: I0131 05:53:38.691444 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zvff\" (UniqueName: \"kubernetes.io/projected/f74d2a07-f376-41b5-b5c3-9305ad3a03fb-kube-api-access-2zvff\") pod \"openstack-operator-index-zzwgm\" (UID: \"f74d2a07-f376-41b5-b5c3-9305ad3a03fb\") " pod="openstack-operators/openstack-operator-index-zzwgm" Jan 31 05:53:38 crc kubenswrapper[4712]: I0131 05:53:38.747739 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-zzwgm" Jan 31 05:53:39 crc kubenswrapper[4712]: I0131 05:53:39.278689 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zzwgm"] Jan 31 05:53:39 crc kubenswrapper[4712]: I0131 05:53:39.473848 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zzwgm" event={"ID":"f74d2a07-f376-41b5-b5c3-9305ad3a03fb","Type":"ContainerStarted","Data":"bd1f7d4b9ae4824684a61919e08ee56125d7d2517cda10485ee7f55ec9c7883a"} Jan 31 05:53:39 crc kubenswrapper[4712]: I0131 05:53:39.477532 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-scc2x" event={"ID":"3c00c25b-d94e-4d57-b604-00937fba85ba","Type":"ContainerStarted","Data":"af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb"} Jan 31 05:53:39 crc kubenswrapper[4712]: I0131 05:53:39.477745 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-scc2x" podUID="3c00c25b-d94e-4d57-b604-00937fba85ba" containerName="registry-server" containerID="cri-o://af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb" gracePeriod=2 Jan 31 05:53:39 crc kubenswrapper[4712]: I0131 05:53:39.506562 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-scc2x" podStartSLOduration=1.728117876 podStartE2EDuration="5.506537844s" podCreationTimestamp="2026-01-31 05:53:34 +0000 UTC" firstStartedPulling="2026-01-31 05:53:35.399307725 +0000 UTC m=+881.493189606" lastFinishedPulling="2026-01-31 05:53:39.177727723 +0000 UTC m=+885.271609574" observedRunningTime="2026-01-31 05:53:39.502635221 +0000 UTC m=+885.596517082" watchObservedRunningTime="2026-01-31 05:53:39.506537844 +0000 UTC m=+885.600419685" Jan 31 05:53:39 crc kubenswrapper[4712]: I0131 05:53:39.850453 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-79qrg" Jan 31 05:53:39 crc kubenswrapper[4712]: I0131 05:53:39.928231 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-scc2x" Jan 31 05:53:39 crc kubenswrapper[4712]: I0131 05:53:39.978553 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-pwnsk" Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.073257 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-648tr\" (UniqueName: \"kubernetes.io/projected/3c00c25b-d94e-4d57-b604-00937fba85ba-kube-api-access-648tr\") pod \"3c00c25b-d94e-4d57-b604-00937fba85ba\" (UID: \"3c00c25b-d94e-4d57-b604-00937fba85ba\") " Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.081998 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c00c25b-d94e-4d57-b604-00937fba85ba-kube-api-access-648tr" (OuterVolumeSpecName: "kube-api-access-648tr") pod "3c00c25b-d94e-4d57-b604-00937fba85ba" (UID: "3c00c25b-d94e-4d57-b604-00937fba85ba"). InnerVolumeSpecName "kube-api-access-648tr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.175117 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-648tr\" (UniqueName: \"kubernetes.io/projected/3c00c25b-d94e-4d57-b604-00937fba85ba-kube-api-access-648tr\") on node \"crc\" DevicePath \"\"" Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.489081 4712 generic.go:334] "Generic (PLEG): container finished" podID="3c00c25b-d94e-4d57-b604-00937fba85ba" containerID="af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb" exitCode=0 Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.489210 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-scc2x" Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.489238 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-scc2x" event={"ID":"3c00c25b-d94e-4d57-b604-00937fba85ba","Type":"ContainerDied","Data":"af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb"} Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.489321 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-scc2x" event={"ID":"3c00c25b-d94e-4d57-b604-00937fba85ba","Type":"ContainerDied","Data":"f76911fbbf925c89d749935c54f72b68daaebd8dfb15ecdd2866b91d669227f5"} Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.489357 4712 scope.go:117] "RemoveContainer" containerID="af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb" Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.491101 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zzwgm" event={"ID":"f74d2a07-f376-41b5-b5c3-9305ad3a03fb","Type":"ContainerStarted","Data":"616cc5a87871798950dad68359552e6fa60c741bfcd16718626c12d8ea1fc95e"} Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.513421 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-zzwgm" podStartSLOduration=2.036882023 podStartE2EDuration="2.513399561s" podCreationTimestamp="2026-01-31 05:53:38 +0000 UTC" firstStartedPulling="2026-01-31 05:53:39.289881425 +0000 UTC m=+885.383763266" lastFinishedPulling="2026-01-31 05:53:39.766398953 +0000 UTC m=+885.860280804" observedRunningTime="2026-01-31 05:53:40.506650341 +0000 UTC m=+886.600532202" watchObservedRunningTime="2026-01-31 05:53:40.513399561 +0000 UTC m=+886.607281402" Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.531699 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-scc2x"] Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.540781 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-scc2x"] Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.550229 4712 scope.go:117] "RemoveContainer" containerID="af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb" Jan 31 05:53:40 crc kubenswrapper[4712]: E0131 05:53:40.550810 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb\": container with ID starting with af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb not found: ID does not exist" 
containerID="af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb" Jan 31 05:53:40 crc kubenswrapper[4712]: I0131 05:53:40.550859 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb"} err="failed to get container status \"af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb\": rpc error: code = NotFound desc = could not find container \"af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb\": container with ID starting with af0fdaae047b43f94fe137e66c0b9dcdd16fa0468a187d72c81546a3af0ac7fb not found: ID does not exist" Jan 31 05:53:42 crc kubenswrapper[4712]: I0131 05:53:42.519416 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c00c25b-d94e-4d57-b604-00937fba85ba" path="/var/lib/kubelet/pods/3c00c25b-d94e-4d57-b604-00937fba85ba/volumes" Jan 31 05:53:48 crc kubenswrapper[4712]: I0131 05:53:48.748771 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-zzwgm" Jan 31 05:53:48 crc kubenswrapper[4712]: I0131 05:53:48.750001 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-zzwgm" Jan 31 05:53:48 crc kubenswrapper[4712]: I0131 05:53:48.799494 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-zzwgm" Jan 31 05:53:49 crc kubenswrapper[4712]: I0131 05:53:49.616787 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-zzwgm" Jan 31 05:53:49 crc kubenswrapper[4712]: I0131 05:53:49.883456 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-7xh8p" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.554569 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj"] Jan 31 05:53:56 crc kubenswrapper[4712]: E0131 05:53:56.555973 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c00c25b-d94e-4d57-b604-00937fba85ba" containerName="registry-server" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.555997 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c00c25b-d94e-4d57-b604-00937fba85ba" containerName="registry-server" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.556322 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c00c25b-d94e-4d57-b604-00937fba85ba" containerName="registry-server" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.557816 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.561604 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-p78w4" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.580624 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj"] Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.672637 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-bundle\") pod \"b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") " pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.672709 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-util\") pod \"b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") " pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.672742 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlggr\" (UniqueName: \"kubernetes.io/projected/99a223e0-dd1b-4f6c-b531-4e7b523468bd-kube-api-access-wlggr\") pod \"b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") " pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.774016 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-bundle\") pod \"b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") " pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.774103 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-util\") pod \"b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") " pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.774144 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlggr\" (UniqueName: \"kubernetes.io/projected/99a223e0-dd1b-4f6c-b531-4e7b523468bd-kube-api-access-wlggr\") pod \"b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") " pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.774925 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-bundle\") pod \"b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") " pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.775255 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-util\") pod \"b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") " pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.810405 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlggr\" (UniqueName: \"kubernetes.io/projected/99a223e0-dd1b-4f6c-b531-4e7b523468bd-kube-api-access-wlggr\") pod \"b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") " pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:56 crc kubenswrapper[4712]: I0131 05:53:56.893932 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:53:57 crc kubenswrapper[4712]: I0131 05:53:57.368279 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj"] Jan 31 05:53:57 crc kubenswrapper[4712]: W0131 05:53:57.379408 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99a223e0_dd1b_4f6c_b531_4e7b523468bd.slice/crio-ccc24265162892ca8ca80790bd362f8e692ec5c7ecaaaf6733531b37d4254459 WatchSource:0}: Error finding container ccc24265162892ca8ca80790bd362f8e692ec5c7ecaaaf6733531b37d4254459: Status 404 returned error can't find the container with id ccc24265162892ca8ca80790bd362f8e692ec5c7ecaaaf6733531b37d4254459 Jan 31 05:53:57 crc kubenswrapper[4712]: I0131 05:53:57.647992 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" event={"ID":"99a223e0-dd1b-4f6c-b531-4e7b523468bd","Type":"ContainerStarted","Data":"9b855a7a3b86645c89e1c9198533023563687f3616bc8bd99f4a628663218618"} Jan 31 05:53:57 crc kubenswrapper[4712]: I0131 05:53:57.648070 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" event={"ID":"99a223e0-dd1b-4f6c-b531-4e7b523468bd","Type":"ContainerStarted","Data":"ccc24265162892ca8ca80790bd362f8e692ec5c7ecaaaf6733531b37d4254459"} Jan 31 05:53:58 crc kubenswrapper[4712]: I0131 05:53:58.656563 4712 generic.go:334] "Generic (PLEG): container finished" podID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerID="9b855a7a3b86645c89e1c9198533023563687f3616bc8bd99f4a628663218618" exitCode=0 Jan 31 05:53:58 crc kubenswrapper[4712]: I0131 05:53:58.656616 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" event={"ID":"99a223e0-dd1b-4f6c-b531-4e7b523468bd","Type":"ContainerDied","Data":"9b855a7a3b86645c89e1c9198533023563687f3616bc8bd99f4a628663218618"} Jan 31 05:53:59 crc kubenswrapper[4712]: 
I0131 05:53:59.666581 4712 generic.go:334] "Generic (PLEG): container finished" podID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerID="21bbb28047081ea5e9dc7e3286cd325c6c82e834d0d6bc1883d374846ef09337" exitCode=0
Jan 31 05:53:59 crc kubenswrapper[4712]: I0131 05:53:59.666671 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" event={"ID":"99a223e0-dd1b-4f6c-b531-4e7b523468bd","Type":"ContainerDied","Data":"21bbb28047081ea5e9dc7e3286cd325c6c82e834d0d6bc1883d374846ef09337"}
Jan 31 05:54:00 crc kubenswrapper[4712]: I0131 05:54:00.682391 4712 generic.go:334] "Generic (PLEG): container finished" podID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerID="075312c259a6476a38c6e37aa50bc19aa32ed66ba056dbc3fc3f19e0613d1ae7" exitCode=0
Jan 31 05:54:00 crc kubenswrapper[4712]: I0131 05:54:00.682491 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" event={"ID":"99a223e0-dd1b-4f6c-b531-4e7b523468bd","Type":"ContainerDied","Data":"075312c259a6476a38c6e37aa50bc19aa32ed66ba056dbc3fc3f19e0613d1ae7"}
Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.011872 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj"
Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.107937 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlggr\" (UniqueName: \"kubernetes.io/projected/99a223e0-dd1b-4f6c-b531-4e7b523468bd-kube-api-access-wlggr\") pod \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") "
Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.108058 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-bundle\") pod \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") "
Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.108092 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-util\") pod \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\" (UID: \"99a223e0-dd1b-4f6c-b531-4e7b523468bd\") "
Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.109434 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-bundle" (OuterVolumeSpecName: "bundle") pod "99a223e0-dd1b-4f6c-b531-4e7b523468bd" (UID: "99a223e0-dd1b-4f6c-b531-4e7b523468bd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
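The bundle pod's containers above finish strictly one at a time, each with exitCode=0 (the RemoveStaleState entries further on name them "pull", "util" and "extract"), and only once the last one has exited are its volumes unmounted (above) and detached (below). That one-after-another ordering matches the init-container contract, where a container must exit 0 before its successor starts; a minimal sketch under that assumption, with stand-in run functions:

// Run containers in order; a non-zero exit blocks everything after it.
package main

import "fmt"

type step struct {
	name string
	run  func() int // returns the container's exit code
}

func runInOrder(steps []step) bool {
	for _, s := range steps {
		code := s.run()
		fmt.Printf("container %q finished, exitCode=%d\n", s.name, code)
		if code != 0 {
			return false
		}
	}
	return true
}

func main() {
	ok := runInOrder([]step{
		{"pull", func() int { return 0 }},
		{"extract", func() int { return 0 }},
		{"util", func() int { return 0 }},
	})
	fmt.Println("all containers exited 0; volumes can be torn down:", ok)
}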
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.124333 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-util" (OuterVolumeSpecName: "util") pod "99a223e0-dd1b-4f6c-b531-4e7b523468bd" (UID: "99a223e0-dd1b-4f6c-b531-4e7b523468bd"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.209954 4712 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-util\") on node \"crc\" DevicePath \"\"" Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.210495 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlggr\" (UniqueName: \"kubernetes.io/projected/99a223e0-dd1b-4f6c-b531-4e7b523468bd-kube-api-access-wlggr\") on node \"crc\" DevicePath \"\"" Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.210644 4712 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/99a223e0-dd1b-4f6c-b531-4e7b523468bd-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.700525 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" event={"ID":"99a223e0-dd1b-4f6c-b531-4e7b523468bd","Type":"ContainerDied","Data":"ccc24265162892ca8ca80790bd362f8e692ec5c7ecaaaf6733531b37d4254459"} Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.700587 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ccc24265162892ca8ca80790bd362f8e692ec5c7ecaaaf6733531b37d4254459" Jan 31 05:54:02 crc kubenswrapper[4712]: I0131 05:54:02.700683 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.610395 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj"] Jan 31 05:54:08 crc kubenswrapper[4712]: E0131 05:54:08.613337 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerName="pull" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.613419 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerName="pull" Jan 31 05:54:08 crc kubenswrapper[4712]: E0131 05:54:08.613491 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerName="util" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.613548 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerName="util" Jan 31 05:54:08 crc kubenswrapper[4712]: E0131 05:54:08.613613 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerName="extract" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.613672 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerName="extract" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.613845 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="99a223e0-dd1b-4f6c-b531-4e7b523468bd" containerName="extract" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.614429 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.618861 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-82kfb" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.632977 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj"] Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.717388 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86zts\" (UniqueName: \"kubernetes.io/projected/23a3b35d-3b93-4e18-b4af-665b780f3580-kube-api-access-86zts\") pod \"openstack-operator-controller-init-757f46c65d-qrpzj\" (UID: \"23a3b35d-3b93-4e18-b4af-665b780f3580\") " pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.818927 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86zts\" (UniqueName: \"kubernetes.io/projected/23a3b35d-3b93-4e18-b4af-665b780f3580-kube-api-access-86zts\") pod \"openstack-operator-controller-init-757f46c65d-qrpzj\" (UID: \"23a3b35d-3b93-4e18-b4af-665b780f3580\") " pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.848769 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86zts\" (UniqueName: \"kubernetes.io/projected/23a3b35d-3b93-4e18-b4af-665b780f3580-kube-api-access-86zts\") pod \"openstack-operator-controller-init-757f46c65d-qrpzj\" (UID: 
\"23a3b35d-3b93-4e18-b4af-665b780f3580\") " pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" Jan 31 05:54:08 crc kubenswrapper[4712]: I0131 05:54:08.935086 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" Jan 31 05:54:09 crc kubenswrapper[4712]: I0131 05:54:09.217574 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj"] Jan 31 05:54:09 crc kubenswrapper[4712]: I0131 05:54:09.751142 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" event={"ID":"23a3b35d-3b93-4e18-b4af-665b780f3580","Type":"ContainerStarted","Data":"68e8540f62b3454d40c32e61a24609bf6b3050ddddc3b655f4766396e2ac40dd"} Jan 31 05:54:14 crc kubenswrapper[4712]: I0131 05:54:14.831949 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" event={"ID":"23a3b35d-3b93-4e18-b4af-665b780f3580","Type":"ContainerStarted","Data":"7a417d2e07fabe20558c61578a0a5b3da1876e649de5f931037580cd57388485"} Jan 31 05:54:14 crc kubenswrapper[4712]: I0131 05:54:14.833963 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" Jan 31 05:54:14 crc kubenswrapper[4712]: I0131 05:54:14.863651 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" podStartSLOduration=1.88224751 podStartE2EDuration="6.863624343s" podCreationTimestamp="2026-01-31 05:54:08 +0000 UTC" firstStartedPulling="2026-01-31 05:54:09.236008344 +0000 UTC m=+915.329890195" lastFinishedPulling="2026-01-31 05:54:14.217385187 +0000 UTC m=+920.311267028" observedRunningTime="2026-01-31 05:54:14.859662281 +0000 UTC m=+920.953544142" watchObservedRunningTime="2026-01-31 05:54:14.863624343 +0000 UTC m=+920.957506194" Jan 31 05:54:28 crc kubenswrapper[4712]: I0131 05:54:28.939151 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-757f46c65d-qrpzj" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.499945 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hhfzr"] Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.502068 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.518862 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hhfzr"] Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.522713 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-catalog-content\") pod \"certified-operators-hhfzr\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.522775 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-utilities\") pod \"certified-operators-hhfzr\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.522805 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgdz6\" (UniqueName: \"kubernetes.io/projected/1639c9ad-583d-4947-92fa-24d85dd1e2a2-kube-api-access-dgdz6\") pod \"certified-operators-hhfzr\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.624648 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-utilities\") pod \"certified-operators-hhfzr\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.624734 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgdz6\" (UniqueName: \"kubernetes.io/projected/1639c9ad-583d-4947-92fa-24d85dd1e2a2-kube-api-access-dgdz6\") pod \"certified-operators-hhfzr\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.624906 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-catalog-content\") pod \"certified-operators-hhfzr\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.625152 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-utilities\") pod \"certified-operators-hhfzr\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.625551 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-catalog-content\") pod \"certified-operators-hhfzr\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.650050 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dgdz6\" (UniqueName: \"kubernetes.io/projected/1639c9ad-583d-4947-92fa-24d85dd1e2a2-kube-api-access-dgdz6\") pod \"certified-operators-hhfzr\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:53 crc kubenswrapper[4712]: I0131 05:54:53.832023 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:54:54 crc kubenswrapper[4712]: I0131 05:54:54.337564 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hhfzr"] Jan 31 05:54:55 crc kubenswrapper[4712]: I0131 05:54:55.120389 4712 generic.go:334] "Generic (PLEG): container finished" podID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerID="8b983a9d830986fba2c5cc590eab7735ffc983e1eb7e91dc7f8a656b70a2f0ad" exitCode=0 Jan 31 05:54:55 crc kubenswrapper[4712]: I0131 05:54:55.120571 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhfzr" event={"ID":"1639c9ad-583d-4947-92fa-24d85dd1e2a2","Type":"ContainerDied","Data":"8b983a9d830986fba2c5cc590eab7735ffc983e1eb7e91dc7f8a656b70a2f0ad"} Jan 31 05:54:55 crc kubenswrapper[4712]: I0131 05:54:55.120747 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhfzr" event={"ID":"1639c9ad-583d-4947-92fa-24d85dd1e2a2","Type":"ContainerStarted","Data":"ab1ef6ea31b01129e09d410dcff03fc3ae6fc389d297480caf4bde1c4b4c3ffc"} Jan 31 05:54:56 crc kubenswrapper[4712]: I0131 05:54:56.128891 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhfzr" event={"ID":"1639c9ad-583d-4947-92fa-24d85dd1e2a2","Type":"ContainerStarted","Data":"ed362019272fcbd07cd35b362f029f9ed566935769847e5dd1a88b9c91da7d6d"} Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.137012 4712 generic.go:334] "Generic (PLEG): container finished" podID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerID="ed362019272fcbd07cd35b362f029f9ed566935769847e5dd1a88b9c91da7d6d" exitCode=0 Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.137138 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhfzr" event={"ID":"1639c9ad-583d-4947-92fa-24d85dd1e2a2","Type":"ContainerDied","Data":"ed362019272fcbd07cd35b362f029f9ed566935769847e5dd1a88b9c91da7d6d"} Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.287379 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.288250 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.290195 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-zp6lk" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.303474 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.304538 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.319160 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.329504 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-tjzmx" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.337822 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.338932 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.342675 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-2bjd4" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.363376 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.380044 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5ldf\" (UniqueName: \"kubernetes.io/projected/e48bf123-5e17-4ef3-980f-92286c95bd85-kube-api-access-z5ldf\") pod \"designate-operator-controller-manager-6d9697b7f4-vmvmz\" (UID: \"e48bf123-5e17-4ef3-980f-92286c95bd85\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.380352 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxw8r\" (UniqueName: \"kubernetes.io/projected/e8223e38-a5ce-4f9f-9780-dea80a326f17-kube-api-access-xxw8r\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-wz82s\" (UID: \"e8223e38-a5ce-4f9f-9780-dea80a326f17\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.380497 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvrdz\" (UniqueName: \"kubernetes.io/projected/c0a2fda3-cecc-40e9-b15e-2d95487c7373-kube-api-access-bvrdz\") pod \"cinder-operator-controller-manager-8d874c8fc-74szb\" (UID: \"c0a2fda3-cecc-40e9-b15e-2d95487c7373\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.384464 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.385676 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.389510 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-4fhmf" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.402634 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.419650 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.431219 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.432615 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.436806 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-cgst6" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.450238 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.458230 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.459202 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.459626 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.470563 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-bfttv" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.485681 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvrdz\" (UniqueName: \"kubernetes.io/projected/c0a2fda3-cecc-40e9-b15e-2d95487c7373-kube-api-access-bvrdz\") pod \"cinder-operator-controller-manager-8d874c8fc-74szb\" (UID: \"c0a2fda3-cecc-40e9-b15e-2d95487c7373\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.485754 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkc82\" (UniqueName: \"kubernetes.io/projected/f94c7f2e-7429-4be0-bad9-f3cdf0156ba9-kube-api-access-xkc82\") pod \"horizon-operator-controller-manager-5fb775575f-7x6wg\" (UID: \"f94c7f2e-7429-4be0-bad9-f3cdf0156ba9\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.485776 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5ldf\" (UniqueName: \"kubernetes.io/projected/e48bf123-5e17-4ef3-980f-92286c95bd85-kube-api-access-z5ldf\") pod \"designate-operator-controller-manager-6d9697b7f4-vmvmz\" (UID: \"e48bf123-5e17-4ef3-980f-92286c95bd85\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.485798 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57jgx\" (UniqueName: \"kubernetes.io/projected/623f0661-5fd0-4c1c-94b8-7cb41dc60f5f-kube-api-access-57jgx\") pod \"heat-operator-controller-manager-69d6db494d-ktrfv\" (UID: \"623f0661-5fd0-4c1c-94b8-7cb41dc60f5f\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.485816 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxw8r\" (UniqueName: \"kubernetes.io/projected/e8223e38-a5ce-4f9f-9780-dea80a326f17-kube-api-access-xxw8r\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-wz82s\" (UID: \"e8223e38-a5ce-4f9f-9780-dea80a326f17\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.485849 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwrpb\" (UniqueName: \"kubernetes.io/projected/b348e7d3-8e8d-484d-bd03-b27125c4fd58-kube-api-access-cwrpb\") pod \"glance-operator-controller-manager-8886f4c47-z96q2\" (UID: \"b348e7d3-8e8d-484d-bd03-b27125c4fd58\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.486768 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7"] Jan 31 05:54:57 crc 
kubenswrapper[4712]: I0131 05:54:57.487543 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.490620 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.491477 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.491519 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.491762 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-5dgd9" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.494910 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-mqzrv" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.501202 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.521811 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5ldf\" (UniqueName: \"kubernetes.io/projected/e48bf123-5e17-4ef3-980f-92286c95bd85-kube-api-access-z5ldf\") pod \"designate-operator-controller-manager-6d9697b7f4-vmvmz\" (UID: \"e48bf123-5e17-4ef3-980f-92286c95bd85\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.521885 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.522754 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.524049 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvrdz\" (UniqueName: \"kubernetes.io/projected/c0a2fda3-cecc-40e9-b15e-2d95487c7373-kube-api-access-bvrdz\") pod \"cinder-operator-controller-manager-8d874c8fc-74szb\" (UID: \"c0a2fda3-cecc-40e9-b15e-2d95487c7373\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.525336 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-n9768"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.527958 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxw8r\" (UniqueName: \"kubernetes.io/projected/e8223e38-a5ce-4f9f-9780-dea80a326f17-kube-api-access-xxw8r\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-wz82s\" (UID: \"e8223e38-a5ce-4f9f-9780-dea80a326f17\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.528511 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-8gnbf" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.528835 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.534144 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-wpkmr" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.562509 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.571231 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.572081 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.573679 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-r6q8r" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.587945 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvfnd\" (UniqueName: \"kubernetes.io/projected/d7b48b1c-633e-4714-a9fe-0cdb81dc946d-kube-api-access-nvfnd\") pod \"ironic-operator-controller-manager-5f4b8bd54d-9djc7\" (UID: \"d7b48b1c-633e-4714-a9fe-0cdb81dc946d\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.588485 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkc82\" (UniqueName: \"kubernetes.io/projected/f94c7f2e-7429-4be0-bad9-f3cdf0156ba9-kube-api-access-xkc82\") pod \"horizon-operator-controller-manager-5fb775575f-7x6wg\" (UID: \"f94c7f2e-7429-4be0-bad9-f3cdf0156ba9\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.588756 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57jgx\" (UniqueName: \"kubernetes.io/projected/623f0661-5fd0-4c1c-94b8-7cb41dc60f5f-kube-api-access-57jgx\") pod \"heat-operator-controller-manager-69d6db494d-ktrfv\" (UID: \"623f0661-5fd0-4c1c-94b8-7cb41dc60f5f\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.588900 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwrpb\" (UniqueName: \"kubernetes.io/projected/b348e7d3-8e8d-484d-bd03-b27125c4fd58-kube-api-access-cwrpb\") pod \"glance-operator-controller-manager-8886f4c47-z96q2\" (UID: \"b348e7d3-8e8d-484d-bd03-b27125c4fd58\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.589014 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.589238 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27wfz\" (UniqueName: \"kubernetes.io/projected/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-kube-api-access-27wfz\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.589441 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm8fc\" (UniqueName: \"kubernetes.io/projected/b81b1954-214b-40b7-886d-3da110000383-kube-api-access-rm8fc\") pod \"keystone-operator-controller-manager-84f48565d4-2zprf\" (UID: \"b81b1954-214b-40b7-886d-3da110000383\") " 
pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.589529 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q89js\" (UniqueName: \"kubernetes.io/projected/2a9eefe0-c80b-479c-a630-4b94bea52b20-kube-api-access-q89js\") pod \"mariadb-operator-controller-manager-67bf948998-czjfw\" (UID: \"2a9eefe0-c80b-479c-a630-4b94bea52b20\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.590620 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdbzd\" (UniqueName: \"kubernetes.io/projected/ddfcf8fb-8920-44fa-a439-ea5d5b6456f4-kube-api-access-cdbzd\") pod \"manila-operator-controller-manager-7dd968899f-n9768\" (UID: \"ddfcf8fb-8920-44fa-a439-ea5d5b6456f4\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.595349 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.596505 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.604145 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-n9768"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.604922 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.613555 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-plcdq" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.631814 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.642403 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkc82\" (UniqueName: \"kubernetes.io/projected/f94c7f2e-7429-4be0-bad9-f3cdf0156ba9-kube-api-access-xkc82\") pod \"horizon-operator-controller-manager-5fb775575f-7x6wg\" (UID: \"f94c7f2e-7429-4be0-bad9-f3cdf0156ba9\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.646705 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57jgx\" (UniqueName: \"kubernetes.io/projected/623f0661-5fd0-4c1c-94b8-7cb41dc60f5f-kube-api-access-57jgx\") pod \"heat-operator-controller-manager-69d6db494d-ktrfv\" (UID: \"623f0661-5fd0-4c1c-94b8-7cb41dc60f5f\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.665854 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwrpb\" (UniqueName: \"kubernetes.io/projected/b348e7d3-8e8d-484d-bd03-b27125c4fd58-kube-api-access-cwrpb\") pod \"glance-operator-controller-manager-8886f4c47-z96q2\" (UID: \"b348e7d3-8e8d-484d-bd03-b27125c4fd58\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.667295 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.672813 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.701057 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.713406 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.718136 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.718388 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx9p2\" (UniqueName: \"kubernetes.io/projected/f7c522e9-e789-4fa5-9736-b6d921eba9e5-kube-api-access-hx9p2\") pod \"neutron-operator-controller-manager-585dbc889-5vrvb\" (UID: \"f7c522e9-e789-4fa5-9736-b6d921eba9e5\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.718434 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27wfz\" (UniqueName: \"kubernetes.io/projected/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-kube-api-access-27wfz\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:54:57 crc kubenswrapper[4712]: E0131 05:54:57.718491 4712 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 31 05:54:57 crc kubenswrapper[4712]: E0131 05:54:57.718574 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert podName:d0f0cc18-6437-4c23-8ebd-f0a234fc72ff nodeName:}" failed. No retries permitted until 2026-01-31 05:54:58.218550621 +0000 UTC m=+964.312432472 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert") pod "infra-operator-controller-manager-79955696d6-pdzn7" (UID: "d0f0cc18-6437-4c23-8ebd-f0a234fc72ff") : secret "infra-operator-webhook-server-cert" not found Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.718765 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm8fc\" (UniqueName: \"kubernetes.io/projected/b81b1954-214b-40b7-886d-3da110000383-kube-api-access-rm8fc\") pod \"keystone-operator-controller-manager-84f48565d4-2zprf\" (UID: \"b81b1954-214b-40b7-886d-3da110000383\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.718814 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q89js\" (UniqueName: \"kubernetes.io/projected/2a9eefe0-c80b-479c-a630-4b94bea52b20-kube-api-access-q89js\") pod \"mariadb-operator-controller-manager-67bf948998-czjfw\" (UID: \"2a9eefe0-c80b-479c-a630-4b94bea52b20\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.718861 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdbzd\" (UniqueName: \"kubernetes.io/projected/ddfcf8fb-8920-44fa-a439-ea5d5b6456f4-kube-api-access-cdbzd\") pod \"manila-operator-controller-manager-7dd968899f-n9768\" (UID: \"ddfcf8fb-8920-44fa-a439-ea5d5b6456f4\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.718893 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvfnd\" (UniqueName: \"kubernetes.io/projected/d7b48b1c-633e-4714-a9fe-0cdb81dc946d-kube-api-access-nvfnd\") pod \"ironic-operator-controller-manager-5f4b8bd54d-9djc7\" (UID: \"d7b48b1c-633e-4714-a9fe-0cdb81dc946d\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.727920 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.729662 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.757915 4712 util.go:30] "No sandbox for pod can be found. 
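The cert mount for infra-operator-controller-manager-79955696d6-pdzn7 fails only because the webhook secret has not been created yet, and nestedpendingoperations schedules the retry with exponential backoff: 500ms here, then 1s on the next failure (visible below at 05:54:58). A sketch of that pattern; the doubling matches the logged delays, but the maxDelay cap is an assumption for the sketch, not kubelet's exact limit:

// Exponential backoff retry, as behind "durationBeforeRetry 500ms ... 1s".
package main

import (
	"errors"
	"fmt"
	"time"
)

func mountWithBackoff(setUp func() error) {
	delay := 500 * time.Millisecond
	maxDelay := 2 * time.Minute // assumed cap, for illustration only
	for setUp() != nil {
		fmt.Printf("failed. No retries permitted until %s (durationBeforeRetry %s)\n",
			time.Now().Add(delay).Format(time.RFC3339Nano), delay)
		time.Sleep(delay)
		if delay *= 2; delay > maxDelay {
			delay = maxDelay
		}
	}
}

func main() {
	attempts := 0
	mountWithBackoff(func() error {
		if attempts++; attempts < 3 {
			return errors.New(`secret "infra-operator-webhook-server-cert" not found`)
		}
		return nil // the secret finally exists; SetUp succeeds
	})
}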
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.792207 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-qn8tn" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.805047 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdbzd\" (UniqueName: \"kubernetes.io/projected/ddfcf8fb-8920-44fa-a439-ea5d5b6456f4-kube-api-access-cdbzd\") pod \"manila-operator-controller-manager-7dd968899f-n9768\" (UID: \"ddfcf8fb-8920-44fa-a439-ea5d5b6456f4\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.816983 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm8fc\" (UniqueName: \"kubernetes.io/projected/b81b1954-214b-40b7-886d-3da110000383-kube-api-access-rm8fc\") pod \"keystone-operator-controller-manager-84f48565d4-2zprf\" (UID: \"b81b1954-214b-40b7-886d-3da110000383\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.817727 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.818691 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27wfz\" (UniqueName: \"kubernetes.io/projected/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-kube-api-access-27wfz\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.820036 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9bc5\" (UniqueName: \"kubernetes.io/projected/46eafe76-b842-4889-98b5-eae45c6c9a70-kube-api-access-j9bc5\") pod \"nova-operator-controller-manager-55bff696bd-qwh9v\" (UID: \"46eafe76-b842-4889-98b5-eae45c6c9a70\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.820103 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx9p2\" (UniqueName: \"kubernetes.io/projected/f7c522e9-e789-4fa5-9736-b6d921eba9e5-kube-api-access-hx9p2\") pod \"neutron-operator-controller-manager-585dbc889-5vrvb\" (UID: \"f7c522e9-e789-4fa5-9736-b6d921eba9e5\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.828276 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q89js\" (UniqueName: \"kubernetes.io/projected/2a9eefe0-c80b-479c-a630-4b94bea52b20-kube-api-access-q89js\") pod \"mariadb-operator-controller-manager-67bf948998-czjfw\" (UID: \"2a9eefe0-c80b-479c-a630-4b94bea52b20\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.869503 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx9p2\" (UniqueName: \"kubernetes.io/projected/f7c522e9-e789-4fa5-9736-b6d921eba9e5-kube-api-access-hx9p2\") pod 
\"neutron-operator-controller-manager-585dbc889-5vrvb\" (UID: \"f7c522e9-e789-4fa5-9736-b6d921eba9e5\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.874231 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvfnd\" (UniqueName: \"kubernetes.io/projected/d7b48b1c-633e-4714-a9fe-0cdb81dc946d-kube-api-access-nvfnd\") pod \"ironic-operator-controller-manager-5f4b8bd54d-9djc7\" (UID: \"d7b48b1c-633e-4714-a9fe-0cdb81dc946d\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.874728 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.891561 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.918254 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.921836 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9bc5\" (UniqueName: \"kubernetes.io/projected/46eafe76-b842-4889-98b5-eae45c6c9a70-kube-api-access-j9bc5\") pod \"nova-operator-controller-manager-55bff696bd-qwh9v\" (UID: \"46eafe76-b842-4889-98b5-eae45c6c9a70\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.962236 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9bc5\" (UniqueName: \"kubernetes.io/projected/46eafe76-b842-4889-98b5-eae45c6c9a70-kube-api-access-j9bc5\") pod \"nova-operator-controller-manager-55bff696bd-qwh9v\" (UID: \"46eafe76-b842-4889-98b5-eae45c6c9a70\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.983922 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2"] Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.985215 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" Jan 31 05:54:57 crc kubenswrapper[4712]: I0131 05:54:57.997202 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-ldfpx" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.011498 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.024312 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz57v\" (UniqueName: \"kubernetes.io/projected/76f477c9-248d-45e0-acdc-098fd960378c-kube-api-access-tz57v\") pod \"octavia-operator-controller-manager-6687f8d877-f9sm2\" (UID: \"76f477c9-248d-45e0-acdc-098fd960378c\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.054720 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.098006 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.113316 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.117640 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.130258 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.131465 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.133252 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz57v\" (UniqueName: \"kubernetes.io/projected/76f477c9-248d-45e0-acdc-098fd960378c-kube-api-access-tz57v\") pod \"octavia-operator-controller-manager-6687f8d877-f9sm2\" (UID: \"76f477c9-248d-45e0-acdc-098fd960378c\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.135898 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-kjbd8" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.136205 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.138962 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.152629 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.153925 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.173090 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-f8w2r" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.208758 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.223366 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz57v\" (UniqueName: \"kubernetes.io/projected/76f477c9-248d-45e0-acdc-098fd960378c-kube-api-access-tz57v\") pod \"octavia-operator-controller-manager-6687f8d877-f9sm2\" (UID: \"76f477c9-248d-45e0-acdc-098fd960378c\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.233755 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.234757 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.235057 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.235112 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xssfk\" (UniqueName: \"kubernetes.io/projected/0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a-kube-api-access-xssfk\") pod \"ovn-operator-controller-manager-788c46999f-2fg8n\" (UID: \"0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.235140 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.235163 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdb4t\" (UniqueName: \"kubernetes.io/projected/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-kube-api-access-vdb4t\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.235353 4712 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.235405 4712 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert podName:d0f0cc18-6437-4c23-8ebd-f0a234fc72ff nodeName:}" failed. No retries permitted until 2026-01-31 05:54:59.235384984 +0000 UTC m=+965.329266825 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert") pod "infra-operator-controller-manager-79955696d6-pdzn7" (UID: "d0f0cc18-6437-4c23-8ebd-f0a234fc72ff") : secret "infra-operator-webhook-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.235434 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.237330 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-2g5mc" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.243700 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.244887 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.253824 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-bm8g8" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.293796 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.307752 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.330138 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.331126 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.334705 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-z4x9w" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.336163 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpxzj\" (UniqueName: \"kubernetes.io/projected/ab883058-7fba-4506-8493-a1c290b67a44-kube-api-access-cpxzj\") pod \"swift-operator-controller-manager-68fc8c869-kcq2k\" (UID: \"ab883058-7fba-4506-8493-a1c290b67a44\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.336263 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7zfj\" (UniqueName: \"kubernetes.io/projected/df910b52-e35d-4099-abe9-676b2863ee90-kube-api-access-k7zfj\") pod \"placement-operator-controller-manager-5b964cf4cd-pfsg7\" (UID: \"df910b52-e35d-4099-abe9-676b2863ee90\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.336496 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xssfk\" (UniqueName: \"kubernetes.io/projected/0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a-kube-api-access-xssfk\") pod \"ovn-operator-controller-manager-788c46999f-2fg8n\" (UID: \"0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.336543 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.336594 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdb4t\" (UniqueName: \"kubernetes.io/projected/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-kube-api-access-vdb4t\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.336857 4712 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.336904 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert podName:cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd nodeName:}" failed. No retries permitted until 2026-01-31 05:54:58.836890099 +0000 UTC m=+964.930771940 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert") pod "openstack-baremetal-operator-controller-manager-86dfb79cc789484" (UID: "cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.338694 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.369926 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.371033 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.372131 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdb4t\" (UniqueName: \"kubernetes.io/projected/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-kube-api-access-vdb4t\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.378364 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-9pp27" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.381382 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xssfk\" (UniqueName: \"kubernetes.io/projected/0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a-kube-api-access-xssfk\") pod \"ovn-operator-controller-manager-788c46999f-2fg8n\" (UID: \"0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.382220 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.394887 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.414096 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-p7ht5"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.417425 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.430143 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-p7ht5"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.450961 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpxzj\" (UniqueName: \"kubernetes.io/projected/ab883058-7fba-4506-8493-a1c290b67a44-kube-api-access-cpxzj\") pod \"swift-operator-controller-manager-68fc8c869-kcq2k\" (UID: \"ab883058-7fba-4506-8493-a1c290b67a44\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.451478 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7zfj\" (UniqueName: \"kubernetes.io/projected/df910b52-e35d-4099-abe9-676b2863ee90-kube-api-access-k7zfj\") pod \"placement-operator-controller-manager-5b964cf4cd-pfsg7\" (UID: \"df910b52-e35d-4099-abe9-676b2863ee90\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.451515 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hdb5\" (UniqueName: \"kubernetes.io/projected/6d0bc1fd-d786-402b-a7b0-4f31066900f9-kube-api-access-2hdb5\") pod \"test-operator-controller-manager-56f8bfcd9f-2wkdr\" (UID: \"6d0bc1fd-d786-402b-a7b0-4f31066900f9\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.451547 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr4rp\" (UniqueName: \"kubernetes.io/projected/43cff116-70bd-4e43-a6a2-cffaebca6f11-kube-api-access-dr4rp\") pod \"watcher-operator-controller-manager-564965969-p7ht5\" (UID: \"43cff116-70bd-4e43-a6a2-cffaebca6f11\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.451654 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlnrh\" (UniqueName: \"kubernetes.io/projected/a0b07b7e-8267-4062-8cf1-9319d4258d13-kube-api-access-jlnrh\") pod \"telemetry-operator-controller-manager-64b5b76f97-msxfl\" (UID: \"a0b07b7e-8267-4062-8cf1-9319d4258d13\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.452594 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-dlbz5" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.492334 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpxzj\" (UniqueName: \"kubernetes.io/projected/ab883058-7fba-4506-8493-a1c290b67a44-kube-api-access-cpxzj\") pod \"swift-operator-controller-manager-68fc8c869-kcq2k\" (UID: \"ab883058-7fba-4506-8493-a1c290b67a44\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.492470 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 
05:54:58.494729 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.497829 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.498048 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.504030 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-ncld8" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.534423 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7zfj\" (UniqueName: \"kubernetes.io/projected/df910b52-e35d-4099-abe9-676b2863ee90-kube-api-access-k7zfj\") pod \"placement-operator-controller-manager-5b964cf4cd-pfsg7\" (UID: \"df910b52-e35d-4099-abe9-676b2863ee90\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.552583 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlnrh\" (UniqueName: \"kubernetes.io/projected/a0b07b7e-8267-4062-8cf1-9319d4258d13-kube-api-access-jlnrh\") pod \"telemetry-operator-controller-manager-64b5b76f97-msxfl\" (UID: \"a0b07b7e-8267-4062-8cf1-9319d4258d13\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.552684 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.552727 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.552752 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9snmd\" (UniqueName: \"kubernetes.io/projected/84f27435-bc45-4501-8dda-59f399689054-kube-api-access-9snmd\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.552781 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hdb5\" (UniqueName: \"kubernetes.io/projected/6d0bc1fd-d786-402b-a7b0-4f31066900f9-kube-api-access-2hdb5\") pod \"test-operator-controller-manager-56f8bfcd9f-2wkdr\" (UID: \"6d0bc1fd-d786-402b-a7b0-4f31066900f9\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" Jan 31 05:54:58 crc 
kubenswrapper[4712]: I0131 05:54:58.552804 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr4rp\" (UniqueName: \"kubernetes.io/projected/43cff116-70bd-4e43-a6a2-cffaebca6f11-kube-api-access-dr4rp\") pod \"watcher-operator-controller-manager-564965969-p7ht5\" (UID: \"43cff116-70bd-4e43-a6a2-cffaebca6f11\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.569518 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.569576 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.571229 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.571353 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.572524 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.574000 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-kfxws" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.574348 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr4rp\" (UniqueName: \"kubernetes.io/projected/43cff116-70bd-4e43-a6a2-cffaebca6f11-kube-api-access-dr4rp\") pod \"watcher-operator-controller-manager-564965969-p7ht5\" (UID: \"43cff116-70bd-4e43-a6a2-cffaebca6f11\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.577029 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlnrh\" (UniqueName: \"kubernetes.io/projected/a0b07b7e-8267-4062-8cf1-9319d4258d13-kube-api-access-jlnrh\") pod \"telemetry-operator-controller-manager-64b5b76f97-msxfl\" (UID: \"a0b07b7e-8267-4062-8cf1-9319d4258d13\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.582825 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hdb5\" (UniqueName: \"kubernetes.io/projected/6d0bc1fd-d786-402b-a7b0-4f31066900f9-kube-api-access-2hdb5\") pod \"test-operator-controller-manager-56f8bfcd9f-2wkdr\" (UID: \"6d0bc1fd-d786-402b-a7b0-4f31066900f9\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.603833 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.619100 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.657345 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.657444 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.657482 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9snmd\" (UniqueName: \"kubernetes.io/projected/84f27435-bc45-4501-8dda-59f399689054-kube-api-access-9snmd\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.657553 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22p72\" (UniqueName: \"kubernetes.io/projected/4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a-kube-api-access-22p72\") pod \"rabbitmq-cluster-operator-manager-668c99d594-nr98t\" (UID: \"4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.657570 4712 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.657672 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:54:59.157643579 +0000 UTC m=+965.251525420 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "metrics-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.660824 4712 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.660927 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:54:59.160903368 +0000 UTC m=+965.254785209 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "webhook-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.688019 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9snmd\" (UniqueName: \"kubernetes.io/projected/84f27435-bc45-4501-8dda-59f399689054-kube-api-access-9snmd\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.697310 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s"] Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.729106 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.742942 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.758736 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22p72\" (UniqueName: \"kubernetes.io/projected/4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a-kube-api-access-22p72\") pod \"rabbitmq-cluster-operator-manager-668c99d594-nr98t\" (UID: \"4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.788066 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22p72\" (UniqueName: \"kubernetes.io/projected/4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a-kube-api-access-22p72\") pod \"rabbitmq-cluster-operator-manager-668c99d594-nr98t\" (UID: \"4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.789893 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.835450 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.864002 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.864187 4712 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: E0131 05:54:58.864235 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert podName:cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd nodeName:}" failed. No retries permitted until 2026-01-31 05:54:59.864220857 +0000 UTC m=+965.958102688 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert") pod "openstack-baremetal-operator-controller-manager-86dfb79cc789484" (UID: "cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:54:58 crc kubenswrapper[4712]: I0131 05:54:58.972056 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.007872 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb"] Jan 31 05:54:59 crc kubenswrapper[4712]: W0131 05:54:59.028072 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode48bf123_5e17_4ef3_980f_92286c95bd85.slice/crio-685677397b964b50a683cf922f2e0c19c4b2a0d73c8d836f543f50e5176f4f0c WatchSource:0}: Error finding container 685677397b964b50a683cf922f2e0c19c4b2a0d73c8d836f543f50e5176f4f0c: Status 404 returned error can't find the container with id 685677397b964b50a683cf922f2e0c19c4b2a0d73c8d836f543f50e5176f4f0c Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.170050 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.170124 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.170278 4712 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.170312 4712 
secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.170334 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:55:00.170320102 +0000 UTC m=+966.264201943 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "webhook-server-cert" not found Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.170391 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:55:00.170369703 +0000 UTC m=+966.264251554 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "metrics-server-cert" not found Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.176065 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" event={"ID":"e48bf123-5e17-4ef3-980f-92286c95bd85","Type":"ContainerStarted","Data":"685677397b964b50a683cf922f2e0c19c4b2a0d73c8d836f543f50e5176f4f0c"} Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.178113 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" event={"ID":"c0a2fda3-cecc-40e9-b15e-2d95487c7373","Type":"ContainerStarted","Data":"2bf5137b1736b0ba5de7f64dbbdded82c65af32003197fd03c63c4a56215cc0a"} Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.181382 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhfzr" event={"ID":"1639c9ad-583d-4947-92fa-24d85dd1e2a2","Type":"ContainerStarted","Data":"56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d"} Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.186633 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s" event={"ID":"e8223e38-a5ce-4f9f-9780-dea80a326f17","Type":"ContainerStarted","Data":"790391d98361caa7796fa272b7424f1ac0a2c66f4b77a32e466801f8aae6977e"} Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.273255 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.273451 4712 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.273502 4712 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert podName:d0f0cc18-6437-4c23-8ebd-f0a234fc72ff nodeName:}" failed. No retries permitted until 2026-01-31 05:55:01.273486117 +0000 UTC m=+967.367367958 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert") pod "infra-operator-controller-manager-79955696d6-pdzn7" (UID: "d0f0cc18-6437-4c23-8ebd-f0a234fc72ff") : secret "infra-operator-webhook-server-cert" not found Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.429902 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hhfzr" podStartSLOduration=3.8862743650000002 podStartE2EDuration="6.429883131s" podCreationTimestamp="2026-01-31 05:54:53 +0000 UTC" firstStartedPulling="2026-01-31 05:54:55.121864071 +0000 UTC m=+961.215745902" lastFinishedPulling="2026-01-31 05:54:57.665472827 +0000 UTC m=+963.759354668" observedRunningTime="2026-01-31 05:54:59.204518829 +0000 UTC m=+965.298400680" watchObservedRunningTime="2026-01-31 05:54:59.429883131 +0000 UTC m=+965.523764972" Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.432012 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf"] Jan 31 05:54:59 crc kubenswrapper[4712]: W0131 05:54:59.434071 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb81b1954_214b_40b7_886d_3da110000383.slice/crio-2a775ca69380d18bdf394be123871d36e4162abadfb56cfc89fd69bf85095c37 WatchSource:0}: Error finding container 2a775ca69380d18bdf394be123871d36e4162abadfb56cfc89fd69bf85095c37: Status 404 returned error can't find the container with id 2a775ca69380d18bdf394be123871d36e4162abadfb56cfc89fd69bf85095c37 Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.459920 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.479375 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.498887 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.515962 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw"] Jan 31 05:54:59 crc kubenswrapper[4712]: W0131 05:54:59.545311 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a9eefe0_c80b_479c_a630_4b94bea52b20.slice/crio-d93eb731caab7d1def7660824acd0f5a03d355e2f5bd6cbda4d7796283927bc0 WatchSource:0}: Error finding container d93eb731caab7d1def7660824acd0f5a03d355e2f5bd6cbda4d7796283927bc0: Status 404 returned error can't find the container with id d93eb731caab7d1def7660824acd0f5a03d355e2f5bd6cbda4d7796283927bc0 Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.547041 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.568148 4712 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.579682 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.590761 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.602053 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.614886 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.624678 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-n9768"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.639485 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb"] Jan 31 05:54:59 crc kubenswrapper[4712]: W0131 05:54:59.658831 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7c522e9_e789_4fa5_9736_b6d921eba9e5.slice/crio-37bcd3f2798bef1e5fc11a46fcc9d6868f0e8488b7661e07f49f217c3bf395c1 WatchSource:0}: Error finding container 37bcd3f2798bef1e5fc11a46fcc9d6868f0e8488b7661e07f49f217c3bf395c1: Status 404 returned error can't find the container with id 37bcd3f2798bef1e5fc11a46fcc9d6868f0e8488b7661e07f49f217c3bf395c1 Jan 31 05:54:59 crc kubenswrapper[4712]: W0131 05:54:59.682681 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod76f477c9_248d_45e0_acdc_098fd960378c.slice/crio-3f790c43cf59f6b89a22a5a2024d73f474bc85c23bc7d522777e07ea5507d543 WatchSource:0}: Error finding container 3f790c43cf59f6b89a22a5a2024d73f474bc85c23bc7d522777e07ea5507d543: Status 404 returned error can't find the container with id 3f790c43cf59f6b89a22a5a2024d73f474bc85c23bc7d522777e07ea5507d543 Jan 31 05:54:59 crc kubenswrapper[4712]: W0131 05:54:59.693961 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46eafe76_b842_4889_98b5_eae45c6c9a70.slice/crio-c730254b27814630f489b085487807c617b36c97126e575cc2b20020b2ffe3f9 WatchSource:0}: Error finding container c730254b27814630f489b085487807c617b36c97126e575cc2b20020b2ffe3f9: Status 404 returned error can't find the container with id c730254b27814630f489b085487807c617b36c97126e575cc2b20020b2ffe3f9 Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.730838 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tz57v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-6687f8d877-f9sm2_openstack-operators(76f477c9-248d-45e0-acdc-098fd960378c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.734911 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" podUID="76f477c9-248d-45e0-acdc-098fd960378c" Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.737309 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j9bc5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-55bff696bd-qwh9v_openstack-operators(46eafe76-b842-4889-98b5-eae45c6c9a70): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.739111 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" podUID="46eafe76-b842-4889-98b5-eae45c6c9a70" Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.755064 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr"] Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.807580 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2hdb5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-2wkdr_openstack-operators(6d0bc1fd-d786-402b-a7b0-4f31066900f9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.807814 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-22p72,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-nr98t_openstack-operators(4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.811759 4712 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" podUID="4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a" Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.813997 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" podUID="6d0bc1fd-d786-402b-a7b0-4f31066900f9" Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.829199 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.851367 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl"] Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.865502 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jlnrh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-64b5b76f97-msxfl_openstack-operators(a0b07b7e-8267-4062-8cf1-9319d4258d13): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 31 05:54:59 crc 
kubenswrapper[4712]: E0131 05:54:59.866778 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" podUID="a0b07b7e-8267-4062-8cf1-9319d4258d13" Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.868505 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-p7ht5"] Jan 31 05:54:59 crc kubenswrapper[4712]: I0131 05:54:59.903566 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.903921 4712 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:54:59 crc kubenswrapper[4712]: E0131 05:54:59.904029 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert podName:cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd nodeName:}" failed. No retries permitted until 2026-01-31 05:55:01.903992741 +0000 UTC m=+967.997874582 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert") pod "openstack-baremetal-operator-controller-manager-86dfb79cc789484" (UID: "cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.203938 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" event={"ID":"f94c7f2e-7429-4be0-bad9-f3cdf0156ba9","Type":"ContainerStarted","Data":"e888f82baee4a9260b012e8c3f2d27fffbedeb1b961f24da911e55ee1f81004a"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.204956 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" event={"ID":"f7c522e9-e789-4fa5-9736-b6d921eba9e5","Type":"ContainerStarted","Data":"37bcd3f2798bef1e5fc11a46fcc9d6868f0e8488b7661e07f49f217c3bf395c1"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.207665 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" event={"ID":"d7b48b1c-633e-4714-a9fe-0cdb81dc946d","Type":"ContainerStarted","Data":"3b9a3779d43e717dd6ea88534f23e1b57cbe86c9f14fae7194fb9497a64651ac"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.210407 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.210587 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:00 crc kubenswrapper[4712]: E0131 05:55:00.210771 4712 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 31 05:55:00 crc kubenswrapper[4712]: E0131 05:55:00.210843 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:55:02.210823404 +0000 UTC m=+968.304705245 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "metrics-server-cert" not found Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.210849 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" event={"ID":"4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a","Type":"ContainerStarted","Data":"a2264c9ce62be0a2d829d5ea28c5f23a750dd3a3e37ffc462c17c40d8d302707"} Jan 31 05:55:00 crc kubenswrapper[4712]: E0131 05:55:00.210939 4712 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 31 05:55:00 crc kubenswrapper[4712]: E0131 05:55:00.210990 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:55:02.210980768 +0000 UTC m=+968.304862609 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "webhook-server-cert" not found Jan 31 05:55:00 crc kubenswrapper[4712]: E0131 05:55:00.216488 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" podUID="4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a" Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.220182 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" event={"ID":"0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a","Type":"ContainerStarted","Data":"fe868ad99bb8fbb5a8baf7f9307f61496b8de4e8e02267cb013b3fdab389f0f2"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.228791 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" event={"ID":"ddfcf8fb-8920-44fa-a439-ea5d5b6456f4","Type":"ContainerStarted","Data":"952ce9fac3c0dd86c59e23f5bfa508d4db5e2fc49d1b77febbf22b6e2f4fcfae"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.249645 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" event={"ID":"6d0bc1fd-d786-402b-a7b0-4f31066900f9","Type":"ContainerStarted","Data":"6390ab81c973c4b7007155b2d00f987dff55ffa2cbb2e9efe4fef0afd6036b45"} Jan 31 05:55:00 crc kubenswrapper[4712]: E0131 05:55:00.253851 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" podUID="6d0bc1fd-d786-402b-a7b0-4f31066900f9" Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.273711 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" event={"ID":"76f477c9-248d-45e0-acdc-098fd960378c","Type":"ContainerStarted","Data":"3f790c43cf59f6b89a22a5a2024d73f474bc85c23bc7d522777e07ea5507d543"} Jan 31 05:55:00 crc kubenswrapper[4712]: E0131 05:55:00.280230 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" podUID="76f477c9-248d-45e0-acdc-098fd960378c" Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.293470 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5" event={"ID":"43cff116-70bd-4e43-a6a2-cffaebca6f11","Type":"ContainerStarted","Data":"646151a2d2dd9d03e8fb6f39d357008ad9315a2394864682433ef1ca21d82a47"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.302307 4712 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" event={"ID":"ab883058-7fba-4506-8493-a1c290b67a44","Type":"ContainerStarted","Data":"c433bed03b8e444212f267452e0208149ec73222468446f7df72fcd72fb6fca3"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.303516 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" event={"ID":"46eafe76-b842-4889-98b5-eae45c6c9a70","Type":"ContainerStarted","Data":"c730254b27814630f489b085487807c617b36c97126e575cc2b20020b2ffe3f9"} Jan 31 05:55:00 crc kubenswrapper[4712]: E0131 05:55:00.324635 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e\\\"\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" podUID="46eafe76-b842-4889-98b5-eae45c6c9a70" Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.344511 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" event={"ID":"623f0661-5fd0-4c1c-94b8-7cb41dc60f5f","Type":"ContainerStarted","Data":"d45a8379f7c7315add62978b01e96ec493d9c8db3839c03cd02b609c1146efc8"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.349384 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" event={"ID":"a0b07b7e-8267-4062-8cf1-9319d4258d13","Type":"ContainerStarted","Data":"477e8de5927c2c064b19054aa15198886f900cad97537e2b762ca0444be6cfcb"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.351242 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" event={"ID":"b348e7d3-8e8d-484d-bd03-b27125c4fd58","Type":"ContainerStarted","Data":"7fa15a6584fca73e1ac67e02f024416795629a675276a0553df14411b005af7f"} Jan 31 05:55:00 crc kubenswrapper[4712]: E0131 05:55:00.370239 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" podUID="a0b07b7e-8267-4062-8cf1-9319d4258d13" Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.377330 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" event={"ID":"b81b1954-214b-40b7-886d-3da110000383","Type":"ContainerStarted","Data":"2a775ca69380d18bdf394be123871d36e4162abadfb56cfc89fd69bf85095c37"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.384957 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7" event={"ID":"df910b52-e35d-4099-abe9-676b2863ee90","Type":"ContainerStarted","Data":"ca05b7ba0d70d2aabea8cbd33544930d67e314f627b367388df594da696076bd"} Jan 31 05:55:00 crc kubenswrapper[4712]: I0131 05:55:00.388681 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" 
event={"ID":"2a9eefe0-c80b-479c-a630-4b94bea52b20","Type":"ContainerStarted","Data":"d93eb731caab7d1def7660824acd0f5a03d355e2f5bd6cbda4d7796283927bc0"} Jan 31 05:55:01 crc kubenswrapper[4712]: I0131 05:55:01.274188 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:55:01 crc kubenswrapper[4712]: E0131 05:55:01.274408 4712 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 31 05:55:01 crc kubenswrapper[4712]: E0131 05:55:01.274493 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert podName:d0f0cc18-6437-4c23-8ebd-f0a234fc72ff nodeName:}" failed. No retries permitted until 2026-01-31 05:55:05.274472575 +0000 UTC m=+971.368354416 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert") pod "infra-operator-controller-manager-79955696d6-pdzn7" (UID: "d0f0cc18-6437-4c23-8ebd-f0a234fc72ff") : secret "infra-operator-webhook-server-cert" not found Jan 31 05:55:01 crc kubenswrapper[4712]: E0131 05:55:01.407775 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" podUID="76f477c9-248d-45e0-acdc-098fd960378c" Jan 31 05:55:01 crc kubenswrapper[4712]: E0131 05:55:01.409089 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" podUID="a0b07b7e-8267-4062-8cf1-9319d4258d13" Jan 31 05:55:01 crc kubenswrapper[4712]: E0131 05:55:01.409225 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" podUID="4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a" Jan 31 05:55:01 crc kubenswrapper[4712]: E0131 05:55:01.409292 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e\\\"\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" podUID="46eafe76-b842-4889-98b5-eae45c6c9a70" Jan 31 05:55:01 crc kubenswrapper[4712]: E0131 05:55:01.409338 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling 
image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" podUID="6d0bc1fd-d786-402b-a7b0-4f31066900f9" Jan 31 05:55:01 crc kubenswrapper[4712]: I0131 05:55:01.993226 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:55:01 crc kubenswrapper[4712]: E0131 05:55:01.993413 4712 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:55:01 crc kubenswrapper[4712]: E0131 05:55:01.993464 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert podName:cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd nodeName:}" failed. No retries permitted until 2026-01-31 05:55:05.993446879 +0000 UTC m=+972.087328710 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert") pod "openstack-baremetal-operator-controller-manager-86dfb79cc789484" (UID: "cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:55:02 crc kubenswrapper[4712]: I0131 05:55:02.301760 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:02 crc kubenswrapper[4712]: I0131 05:55:02.301837 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:02 crc kubenswrapper[4712]: E0131 05:55:02.301976 4712 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 31 05:55:02 crc kubenswrapper[4712]: E0131 05:55:02.302020 4712 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 31 05:55:02 crc kubenswrapper[4712]: E0131 05:55:02.302087 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:55:06.302055125 +0000 UTC m=+972.395937156 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "metrics-server-cert" not found Jan 31 05:55:02 crc kubenswrapper[4712]: E0131 05:55:02.302115 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:55:06.302104696 +0000 UTC m=+972.395986767 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "webhook-server-cert" not found Jan 31 05:55:03 crc kubenswrapper[4712]: I0131 05:55:03.832209 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:55:03 crc kubenswrapper[4712]: I0131 05:55:03.832298 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:55:03 crc kubenswrapper[4712]: I0131 05:55:03.884382 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:55:04 crc kubenswrapper[4712]: I0131 05:55:04.514563 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:55:04 crc kubenswrapper[4712]: I0131 05:55:04.856400 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hhfzr"] Jan 31 05:55:05 crc kubenswrapper[4712]: I0131 05:55:05.357142 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:55:05 crc kubenswrapper[4712]: E0131 05:55:05.357428 4712 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 31 05:55:05 crc kubenswrapper[4712]: E0131 05:55:05.357561 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert podName:d0f0cc18-6437-4c23-8ebd-f0a234fc72ff nodeName:}" failed. No retries permitted until 2026-01-31 05:55:13.357533174 +0000 UTC m=+979.451415035 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert") pod "infra-operator-controller-manager-79955696d6-pdzn7" (UID: "d0f0cc18-6437-4c23-8ebd-f0a234fc72ff") : secret "infra-operator-webhook-server-cert" not found Jan 31 05:55:06 crc kubenswrapper[4712]: I0131 05:55:06.069947 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:55:06 crc kubenswrapper[4712]: E0131 05:55:06.070160 4712 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:55:06 crc kubenswrapper[4712]: E0131 05:55:06.070262 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert podName:cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd nodeName:}" failed. No retries permitted until 2026-01-31 05:55:14.070237646 +0000 UTC m=+980.164119487 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert") pod "openstack-baremetal-operator-controller-manager-86dfb79cc789484" (UID: "cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 31 05:55:06 crc kubenswrapper[4712]: I0131 05:55:06.374975 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:06 crc kubenswrapper[4712]: I0131 05:55:06.375106 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:06 crc kubenswrapper[4712]: E0131 05:55:06.375601 4712 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 31 05:55:06 crc kubenswrapper[4712]: E0131 05:55:06.375708 4712 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 31 05:55:06 crc kubenswrapper[4712]: E0131 05:55:06.375738 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:55:14.375705456 +0000 UTC m=+980.469587317 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "webhook-server-cert" not found Jan 31 05:55:06 crc kubenswrapper[4712]: E0131 05:55:06.375760 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs podName:84f27435-bc45-4501-8dda-59f399689054 nodeName:}" failed. No retries permitted until 2026-01-31 05:55:14.375752277 +0000 UTC m=+980.469634138 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs") pod "openstack-operator-controller-manager-6b6f655c79-fdkzc" (UID: "84f27435-bc45-4501-8dda-59f399689054") : secret "metrics-server-cert" not found Jan 31 05:55:06 crc kubenswrapper[4712]: I0131 05:55:06.465399 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" containerID="cri-o://56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" gracePeriod=2 Jan 31 05:55:07 crc kubenswrapper[4712]: I0131 05:55:07.476979 4712 generic.go:334] "Generic (PLEG): container finished" podID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" exitCode=0 Jan 31 05:55:07 crc kubenswrapper[4712]: I0131 05:55:07.477054 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhfzr" event={"ID":"1639c9ad-583d-4947-92fa-24d85dd1e2a2","Type":"ContainerDied","Data":"56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d"} Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.629650 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wqtq8"] Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.649617 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.654481 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wqtq8"] Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.726497 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m86w5\" (UniqueName: \"kubernetes.io/projected/564f7249-49ec-4608-9f4e-b61d993a8a0e-kube-api-access-m86w5\") pod \"community-operators-wqtq8\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.726599 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-utilities\") pod \"community-operators-wqtq8\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.726764 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-catalog-content\") pod \"community-operators-wqtq8\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.829515 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-utilities\") pod \"community-operators-wqtq8\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.829800 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-catalog-content\") pod \"community-operators-wqtq8\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.829906 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m86w5\" (UniqueName: \"kubernetes.io/projected/564f7249-49ec-4608-9f4e-b61d993a8a0e-kube-api-access-m86w5\") pod \"community-operators-wqtq8\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.832095 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-utilities\") pod \"community-operators-wqtq8\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.832629 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-catalog-content\") pod \"community-operators-wqtq8\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.858041 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-m86w5\" (UniqueName: \"kubernetes.io/projected/564f7249-49ec-4608-9f4e-b61d993a8a0e-kube-api-access-m86w5\") pod \"community-operators-wqtq8\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:08 crc kubenswrapper[4712]: I0131 05:55:08.985125 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:55:12 crc kubenswrapper[4712]: I0131 05:55:12.497310 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:55:12 crc kubenswrapper[4712]: I0131 05:55:12.497970 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:55:13 crc kubenswrapper[4712]: I0131 05:55:13.412471 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:55:13 crc kubenswrapper[4712]: I0131 05:55:13.428055 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d0f0cc18-6437-4c23-8ebd-f0a234fc72ff-cert\") pod \"infra-operator-controller-manager-79955696d6-pdzn7\" (UID: \"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:55:13 crc kubenswrapper[4712]: I0131 05:55:13.466878 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:55:13 crc kubenswrapper[4712]: E0131 05:55:13.833511 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:13 crc kubenswrapper[4712]: E0131 05:55:13.835004 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:13 crc kubenswrapper[4712]: E0131 05:55:13.835447 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:13 crc kubenswrapper[4712]: E0131 05:55:13.835506 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.126595 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.139684 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd-cert\") pod \"openstack-baremetal-operator-controller-manager-86dfb79cc789484\" (UID: \"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.154697 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.432518 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.433066 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.436939 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-metrics-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.444388 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/84f27435-bc45-4501-8dda-59f399689054-webhook-certs\") pod \"openstack-operator-controller-manager-6b6f655c79-fdkzc\" (UID: \"84f27435-bc45-4501-8dda-59f399689054\") " pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.717039 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.835569 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gxjkw"] Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.838315 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.847034 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxjkw"] Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.942813 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-utilities\") pod \"redhat-marketplace-gxjkw\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.942886 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zbwg\" (UniqueName: \"kubernetes.io/projected/4061d25d-10d7-4107-b6e7-d776945214d7-kube-api-access-8zbwg\") pod \"redhat-marketplace-gxjkw\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:14 crc kubenswrapper[4712]: I0131 05:55:14.942972 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-catalog-content\") pod \"redhat-marketplace-gxjkw\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:15 crc kubenswrapper[4712]: I0131 05:55:15.045215 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-catalog-content\") pod \"redhat-marketplace-gxjkw\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:15 crc kubenswrapper[4712]: I0131 05:55:15.045309 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-utilities\") pod \"redhat-marketplace-gxjkw\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:15 crc kubenswrapper[4712]: I0131 05:55:15.045330 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zbwg\" (UniqueName: \"kubernetes.io/projected/4061d25d-10d7-4107-b6e7-d776945214d7-kube-api-access-8zbwg\") pod \"redhat-marketplace-gxjkw\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:15 crc kubenswrapper[4712]: I0131 05:55:15.046035 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-catalog-content\") pod \"redhat-marketplace-gxjkw\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:15 crc kubenswrapper[4712]: I0131 05:55:15.046702 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-utilities\") pod \"redhat-marketplace-gxjkw\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:15 crc kubenswrapper[4712]: I0131 05:55:15.066872 4712 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8zbwg\" (UniqueName: \"kubernetes.io/projected/4061d25d-10d7-4107-b6e7-d776945214d7-kube-api-access-8zbwg\") pod \"redhat-marketplace-gxjkw\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:15 crc kubenswrapper[4712]: I0131 05:55:15.168130 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:55:17 crc kubenswrapper[4712]: E0131 05:55:17.173919 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:6e21a1dda86ba365817102d23a5d4d2d5dcd1c4d8e5f8d74bd24548aa8c63898" Jan 31 05:55:17 crc kubenswrapper[4712]: E0131 05:55:17.174532 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:6e21a1dda86ba365817102d23a5d4d2d5dcd1c4d8e5f8d74bd24548aa8c63898,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bvrdz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-8d874c8fc-74szb_openstack-operators(c0a2fda3-cecc-40e9-b15e-2d95487c7373): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:55:17 crc kubenswrapper[4712]: E0131 05:55:17.176085 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" podUID="c0a2fda3-cecc-40e9-b15e-2d95487c7373" Jan 31 05:55:17 crc kubenswrapper[4712]: E0131 05:55:17.579445 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:6e21a1dda86ba365817102d23a5d4d2d5dcd1c4d8e5f8d74bd24548aa8c63898\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" podUID="c0a2fda3-cecc-40e9-b15e-2d95487c7373" Jan 31 05:55:18 crc kubenswrapper[4712]: E0131 05:55:18.378797 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:1f593e8d49d02b6484c89632192ae54771675c54fbd8426e3675b8e20ecfd7c4" Jan 31 05:55:18 crc kubenswrapper[4712]: E0131 05:55:18.379024 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:1f593e8d49d02b6484c89632192ae54771675c54fbd8426e3675b8e20ecfd7c4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cwrpb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-8886f4c47-z96q2_openstack-operators(b348e7d3-8e8d-484d-bd03-b27125c4fd58): ErrImagePull: rpc error: code = Canceled desc = 
copying config: context canceled" logger="UnhandledError" Jan 31 05:55:18 crc kubenswrapper[4712]: E0131 05:55:18.382141 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" podUID="b348e7d3-8e8d-484d-bd03-b27125c4fd58" Jan 31 05:55:18 crc kubenswrapper[4712]: E0131 05:55:18.585781 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:1f593e8d49d02b6484c89632192ae54771675c54fbd8426e3675b8e20ecfd7c4\\\"\"" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" podUID="b348e7d3-8e8d-484d-bd03-b27125c4fd58" Jan 31 05:55:19 crc kubenswrapper[4712]: E0131 05:55:19.281861 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382" Jan 31 05:55:19 crc kubenswrapper[4712]: E0131 05:55:19.282265 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cpxzj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-68fc8c869-kcq2k_openstack-operators(ab883058-7fba-4506-8493-a1c290b67a44): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:55:19 crc kubenswrapper[4712]: E0131 05:55:19.284354 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" podUID="ab883058-7fba-4506-8493-a1c290b67a44" Jan 31 05:55:19 crc kubenswrapper[4712]: E0131 05:55:19.591844 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:42ad717de1b82267d244b016e5491a5b66a5c3deb6b8c2906a379e1296a2c382\\\"\"" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" podUID="ab883058-7fba-4506-8493-a1c290b67a44" Jan 31 05:55:20 crc kubenswrapper[4712]: E0131 05:55:20.687695 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:27d83ada27cf70cda0c5738f97551d81f1ea4068e83a090f3312e22172d72e10" Jan 31 05:55:20 crc kubenswrapper[4712]: E0131 05:55:20.687966 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:27d83ada27cf70cda0c5738f97551d81f1ea4068e83a090f3312e22172d72e10,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-57jgx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-69d6db494d-ktrfv_openstack-operators(623f0661-5fd0-4c1c-94b8-7cb41dc60f5f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:55:20 crc kubenswrapper[4712]: E0131 05:55:20.689280 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" podUID="623f0661-5fd0-4c1c-94b8-7cb41dc60f5f" Jan 31 05:55:21 crc kubenswrapper[4712]: E0131 05:55:21.546521 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:d9f6f8dc6a6dd9b0d7c96e4c89b3056291fd61f11126a1304256a4d6cacd0382" Jan 31 05:55:21 crc kubenswrapper[4712]: E0131 05:55:21.546753 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:d9f6f8dc6a6dd9b0d7c96e4c89b3056291fd61f11126a1304256a4d6cacd0382,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z5ldf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-6d9697b7f4-vmvmz_openstack-operators(e48bf123-5e17-4ef3-980f-92286c95bd85): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:55:21 crc kubenswrapper[4712]: E0131 05:55:21.548085 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" podUID="e48bf123-5e17-4ef3-980f-92286c95bd85" Jan 31 05:55:21 crc kubenswrapper[4712]: E0131 05:55:21.608878 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:27d83ada27cf70cda0c5738f97551d81f1ea4068e83a090f3312e22172d72e10\\\"\"" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" podUID="623f0661-5fd0-4c1c-94b8-7cb41dc60f5f" Jan 31 05:55:21 crc kubenswrapper[4712]: E0131 05:55:21.610690 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:d9f6f8dc6a6dd9b0d7c96e4c89b3056291fd61f11126a1304256a4d6cacd0382\\\"\"" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" podUID="e48bf123-5e17-4ef3-980f-92286c95bd85" Jan 31 05:55:22 crc kubenswrapper[4712]: E0131 05:55:22.568537 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:bbb46b8b3b69fdfad7bafc10a7e88f6ea58bcdc3c91e30beb79e24417d52e0f6" Jan 31 05:55:22 crc kubenswrapper[4712]: E0131 05:55:22.568839 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:bbb46b8b3b69fdfad7bafc10a7e88f6ea58bcdc3c91e30beb79e24417d52e0f6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hx9p2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-585dbc889-5vrvb_openstack-operators(f7c522e9-e789-4fa5-9736-b6d921eba9e5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:55:22 crc kubenswrapper[4712]: E0131 05:55:22.570046 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" podUID="f7c522e9-e789-4fa5-9736-b6d921eba9e5" Jan 31 05:55:22 crc kubenswrapper[4712]: E0131 05:55:22.647185 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:bbb46b8b3b69fdfad7bafc10a7e88f6ea58bcdc3c91e30beb79e24417d52e0f6\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" podUID="f7c522e9-e789-4fa5-9736-b6d921eba9e5" Jan 31 05:55:23 crc kubenswrapper[4712]: E0131 05:55:23.833964 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 
05:55:23 crc kubenswrapper[4712]: E0131 05:55:23.834374 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:23 crc kubenswrapper[4712]: E0131 05:55:23.834644 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:23 crc kubenswrapper[4712]: E0131 05:55:23.834681 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:55:25 crc kubenswrapper[4712]: E0131 05:55:25.371787 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17" Jan 31 05:55:25 crc kubenswrapper[4712]: E0131 05:55:25.372289 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rm8fc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-84f48565d4-2zprf_openstack-operators(b81b1954-214b-40b7-886d-3da110000383): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:55:25 crc kubenswrapper[4712]: E0131 05:55:25.373775 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" podUID="b81b1954-214b-40b7-886d-3da110000383" Jan 31 05:55:25 crc kubenswrapper[4712]: E0131 05:55:25.666779 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" podUID="b81b1954-214b-40b7-886d-3da110000383" Jan 31 05:55:27 crc kubenswrapper[4712]: E0131 05:55:27.498488 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:cd911e8d7a7a1104d77691dbaaf54370015cbb82859337746db5a9186d5dc566" Jan 31 05:55:27 crc kubenswrapper[4712]: E0131 05:55:27.499224 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:cd911e8d7a7a1104d77691dbaaf54370015cbb82859337746db5a9186d5dc566,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cdbzd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7dd968899f-n9768_openstack-operators(ddfcf8fb-8920-44fa-a439-ea5d5b6456f4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:55:27 crc kubenswrapper[4712]: E0131 05:55:27.502694 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" podUID="ddfcf8fb-8920-44fa-a439-ea5d5b6456f4" Jan 31 05:55:27 crc kubenswrapper[4712]: E0131 05:55:27.682028 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:cd911e8d7a7a1104d77691dbaaf54370015cbb82859337746db5a9186d5dc566\\\"\"" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" podUID="ddfcf8fb-8920-44fa-a439-ea5d5b6456f4" Jan 31 05:55:33 crc kubenswrapper[4712]: E0131 05:55:33.833299 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:33 crc kubenswrapper[4712]: E0131 05:55:33.835205 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:33 crc kubenswrapper[4712]: E0131 05:55:33.835810 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 
05:55:33 crc kubenswrapper[4712]: E0131 05:55:33.835920 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:55:42 crc kubenswrapper[4712]: I0131 05:55:42.497448 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:55:42 crc kubenswrapper[4712]: I0131 05:55:42.498356 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:55:43 crc kubenswrapper[4712]: E0131 05:55:43.833533 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:43 crc kubenswrapper[4712]: E0131 05:55:43.834070 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:43 crc kubenswrapper[4712]: E0131 05:55:43.834653 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:43 crc kubenswrapper[4712]: E0131 05:55:43.834734 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:55:53 crc kubenswrapper[4712]: E0131 05:55:53.833949 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:53 crc kubenswrapper[4712]: E0131 05:55:53.837906 4712 log.go:32] "ExecSync cmd 
from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:53 crc kubenswrapper[4712]: E0131 05:55:53.838791 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:55:53 crc kubenswrapper[4712]: E0131 05:55:53.838933 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:56:03 crc kubenswrapper[4712]: E0131 05:56:03.834478 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:56:03 crc kubenswrapper[4712]: E0131 05:56:03.836137 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:56:03 crc kubenswrapper[4712]: E0131 05:56:03.836848 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:56:03 crc kubenswrapper[4712]: E0131 05:56:03.836920 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:56:12 crc kubenswrapper[4712]: I0131 05:56:12.497537 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:56:12 crc kubenswrapper[4712]: I0131 05:56:12.499424 4712 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:56:12 crc kubenswrapper[4712]: I0131 05:56:12.499545 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 05:56:12 crc kubenswrapper[4712]: I0131 05:56:12.501148 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d072cbc66487ddb6d9c89bfc7420c017cf30064480bcc1f0a5508bf27bbaeb59"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 05:56:12 crc kubenswrapper[4712]: I0131 05:56:12.501391 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://d072cbc66487ddb6d9c89bfc7420c017cf30064480bcc1f0a5508bf27bbaeb59" gracePeriod=600 Jan 31 05:56:13 crc kubenswrapper[4712]: E0131 05:56:13.833723 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:56:13 crc kubenswrapper[4712]: E0131 05:56:13.835240 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:56:13 crc kubenswrapper[4712]: E0131 05:56:13.835728 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:56:13 crc kubenswrapper[4712]: E0131 05:56:13.835792 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:56:23 crc kubenswrapper[4712]: E0131 05:56:23.834018 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" 
cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:56:23 crc kubenswrapper[4712]: E0131 05:56:23.836466 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:56:23 crc kubenswrapper[4712]: E0131 05:56:23.837142 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:56:23 crc kubenswrapper[4712]: E0131 05:56:23.837308 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:33.833331 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:33.834829 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:33.835336 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:33.835465 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:43.833029 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" 
containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:43.835129 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:43.835759 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:43.835846 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:53.834268 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:53.836316 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:53.836919 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:56:53.837002 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:57:03.833408 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:57:03.834966 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:57:03.835741 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" cmd=["grpc_health_probe","-addr=:50051"] Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:57:03.835775 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:57:06.901654 4712 log.go:32] "StopPodSandbox from runtime service failed" err="rpc error: code = DeadlineExceeded desc = context deadline exceeded" podSandboxID="ab1ef6ea31b01129e09d410dcff03fc3ae6fc389d297480caf4bde1c4b4c3ffc" Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:57:06.901712 4712 kuberuntime_manager.go:1479] "Failed to stop sandbox" podSandboxID={"Type":"cri-o","ID":"ab1ef6ea31b01129e09d410dcff03fc3ae6fc389d297480caf4bde1c4b4c3ffc"} Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:57:06.901769 4712 kubelet.go:2041] "Unhandled Error" err="failed to \"KillPodSandbox\" for \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\" with KillPodSandboxError: \"rpc error: code = DeadlineExceeded desc = context deadline exceeded\"" logger="UnhandledError" Jan 31 05:57:10 crc kubenswrapper[4712]: E0131 05:57:06.902889 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"KillPodSandbox\" for \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\" with KillPodSandboxError: \"rpc error: code = DeadlineExceeded desc = context deadline exceeded\"" pod="openshift-marketplace/certified-operators-hhfzr" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" Jan 31 05:57:11 crc kubenswrapper[4712]: I0131 05:57:11.626764 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="d072cbc66487ddb6d9c89bfc7420c017cf30064480bcc1f0a5508bf27bbaeb59" exitCode=0 Jan 31 05:57:11 crc kubenswrapper[4712]: I0131 05:57:11.626839 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"d072cbc66487ddb6d9c89bfc7420c017cf30064480bcc1f0a5508bf27bbaeb59"} Jan 31 05:57:11 crc kubenswrapper[4712]: I0131 05:57:11.627481 4712 scope.go:117] "RemoveContainer" 
containerID="6c31bbfd8c8125c0094ca4ca6d9f21a2e8a425cef3c9f9b1a3d36d5e6b54a6c8" Jan 31 05:57:11 crc kubenswrapper[4712]: E0131 05:57:11.871570 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:027cd7ab61ef5071d9ad6b729c95a98e51cd254642f01dc019d44cc98a9232f8" Jan 31 05:57:11 crc kubenswrapper[4712]: E0131 05:57:11.871884 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:027cd7ab61ef5071d9ad6b729c95a98e51cd254642f01dc019d44cc98a9232f8,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xkc82,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-5fb775575f-7x6wg_openstack-operators(f94c7f2e-7429-4be0-bad9-f3cdf0156ba9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:57:11 crc kubenswrapper[4712]: E0131 05:57:11.873104 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" podUID="f94c7f2e-7429-4be0-bad9-f3cdf0156ba9" Jan 31 05:57:12 crc kubenswrapper[4712]: E0131 05:57:12.637084 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:027cd7ab61ef5071d9ad6b729c95a98e51cd254642f01dc019d44cc98a9232f8\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" podUID="f94c7f2e-7429-4be0-bad9-f3cdf0156ba9" Jan 31 05:57:13 crc kubenswrapper[4712]: I0131 05:57:13.697364 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:57:13 crc kubenswrapper[4712]: I0131 05:57:13.734938 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-utilities\") pod \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " Jan 31 05:57:13 crc kubenswrapper[4712]: I0131 05:57:13.735057 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgdz6\" (UniqueName: \"kubernetes.io/projected/1639c9ad-583d-4947-92fa-24d85dd1e2a2-kube-api-access-dgdz6\") pod \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " Jan 31 05:57:13 crc kubenswrapper[4712]: I0131 05:57:13.735441 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-catalog-content\") pod \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\" (UID: \"1639c9ad-583d-4947-92fa-24d85dd1e2a2\") " Jan 31 05:57:13 crc kubenswrapper[4712]: I0131 05:57:13.736085 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-utilities" (OuterVolumeSpecName: "utilities") pod "1639c9ad-583d-4947-92fa-24d85dd1e2a2" (UID: "1639c9ad-583d-4947-92fa-24d85dd1e2a2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:57:13 crc kubenswrapper[4712]: I0131 05:57:13.736960 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:57:13 crc kubenswrapper[4712]: I0131 05:57:13.743243 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1639c9ad-583d-4947-92fa-24d85dd1e2a2-kube-api-access-dgdz6" (OuterVolumeSpecName: "kube-api-access-dgdz6") pod "1639c9ad-583d-4947-92fa-24d85dd1e2a2" (UID: "1639c9ad-583d-4947-92fa-24d85dd1e2a2"). InnerVolumeSpecName "kube-api-access-dgdz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:57:13 crc kubenswrapper[4712]: I0131 05:57:13.839709 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgdz6\" (UniqueName: \"kubernetes.io/projected/1639c9ad-583d-4947-92fa-24d85dd1e2a2-kube-api-access-dgdz6\") on node \"crc\" DevicePath \"\"" Jan 31 05:57:14 crc kubenswrapper[4712]: I0131 05:57:14.476999 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1639c9ad-583d-4947-92fa-24d85dd1e2a2" (UID: "1639c9ad-583d-4947-92fa-24d85dd1e2a2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:57:14 crc kubenswrapper[4712]: I0131 05:57:14.552906 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1639c9ad-583d-4947-92fa-24d85dd1e2a2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:57:14 crc kubenswrapper[4712]: I0131 05:57:14.648399 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hhfzr" event={"ID":"1639c9ad-583d-4947-92fa-24d85dd1e2a2","Type":"ContainerDied","Data":"ab1ef6ea31b01129e09d410dcff03fc3ae6fc389d297480caf4bde1c4b4c3ffc"} Jan 31 05:57:14 crc kubenswrapper[4712]: I0131 05:57:14.648441 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hhfzr" Jan 31 05:57:14 crc kubenswrapper[4712]: I0131 05:57:14.669108 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hhfzr"] Jan 31 05:57:14 crc kubenswrapper[4712]: I0131 05:57:14.674903 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hhfzr"] Jan 31 05:57:16 crc kubenswrapper[4712]: I0131 05:57:16.512529 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" path="/var/lib/kubelet/pods/1639c9ad-583d-4947-92fa-24d85dd1e2a2/volumes" Jan 31 05:57:18 crc kubenswrapper[4712]: E0131 05:57:18.061385 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:bead175f27e5f074f723694f3b66e5aa7238411bf8a27a267b9a2936e4465521" Jan 31 05:57:18 crc kubenswrapper[4712]: E0131 05:57:18.061688 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:bead175f27e5f074f723694f3b66e5aa7238411bf8a27a267b9a2936e4465521,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nvfnd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-5f4b8bd54d-9djc7_openstack-operators(d7b48b1c-633e-4714-a9fe-0cdb81dc946d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:57:18 crc kubenswrapper[4712]: E0131 05:57:18.062951 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" podUID="d7b48b1c-633e-4714-a9fe-0cdb81dc946d" Jan 31 05:57:18 crc kubenswrapper[4712]: E0131 05:57:18.922972 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:bead175f27e5f074f723694f3b66e5aa7238411bf8a27a267b9a2936e4465521\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" podUID="d7b48b1c-633e-4714-a9fe-0cdb81dc946d" Jan 31 05:57:20 crc kubenswrapper[4712]: E0131 05:57:20.477276 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a" Jan 31 05:57:20 crc kubenswrapper[4712]: E0131 05:57:20.477852 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jlnrh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-64b5b76f97-msxfl_openstack-operators(a0b07b7e-8267-4062-8cf1-9319d4258d13): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:57:20 crc kubenswrapper[4712]: E0131 05:57:20.479107 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" podUID="a0b07b7e-8267-4062-8cf1-9319d4258d13" Jan 31 05:57:22 crc kubenswrapper[4712]: E0131 05:57:22.512623 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241" Jan 31 05:57:22 crc kubenswrapper[4712]: E0131 05:57:22.513169 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2hdb5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-2wkdr_openstack-operators(6d0bc1fd-d786-402b-a7b0-4f31066900f9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:57:22 crc kubenswrapper[4712]: E0131 05:57:22.514373 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" podUID="6d0bc1fd-d786-402b-a7b0-4f31066900f9" Jan 31 05:57:23 crc kubenswrapper[4712]: E0131 05:57:23.341079 4712 log.go:32] "ListImages with filter from image service failed" err="rpc error: code = DeadlineExceeded desc = context deadline exceeded" filter="nil" Jan 31 05:57:23 crc kubenswrapper[4712]: E0131 05:57:23.341236 4712 kuberuntime_image.go:117] "Failed to list images" err="rpc error: code = DeadlineExceeded desc = context deadline exceeded" Jan 31 05:57:23 crc kubenswrapper[4712]: I0131 05:57:23.341250 4712 image_gc_manager.go:222] "Failed to update image list" err="rpc error: code = DeadlineExceeded desc = context deadline exceeded" Jan 31 05:57:23 crc kubenswrapper[4712]: E0131 05:57:23.409376 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be" Jan 31 05:57:23 crc kubenswrapper[4712]: E0131 05:57:23.409556 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tz57v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-6687f8d877-f9sm2_openstack-operators(76f477c9-248d-45e0-acdc-098fd960378c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:57:23 crc kubenswrapper[4712]: E0131 05:57:23.410833 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" podUID="76f477c9-248d-45e0-acdc-098fd960378c" Jan 31 05:57:25 crc kubenswrapper[4712]: E0131 05:57:25.479579 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e" Jan 31 05:57:25 crc kubenswrapper[4712]: E0131 05:57:25.480136 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j9bc5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-55bff696bd-qwh9v_openstack-operators(46eafe76-b842-4889-98b5-eae45c6c9a70): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:57:25 crc kubenswrapper[4712]: E0131 05:57:25.481600 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" podUID="46eafe76-b842-4889-98b5-eae45c6c9a70" Jan 31 05:57:26 crc kubenswrapper[4712]: E0131 05:57:26.013311 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Jan 31 05:57:26 crc kubenswrapper[4712]: E0131 05:57:26.013859 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-22p72,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-nr98t_openstack-operators(4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:57:26 crc kubenswrapper[4712]: E0131 05:57:26.015251 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" podUID="4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.088618 4712 scope.go:117] "RemoveContainer" containerID="56495dbce152687e349d0a18530d987a7a6b823e743c815ed7fb46bd2cb7151d" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.268705 4712 scope.go:117] "RemoveContainer" containerID="ed362019272fcbd07cd35b362f029f9ed566935769847e5dd1a88b9c91da7d6d" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.347077 4712 scope.go:117] "RemoveContainer" containerID="8b983a9d830986fba2c5cc590eab7735ffc983e1eb7e91dc7f8a656b70a2f0ad" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.550148 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7"] Jan 31 05:57:26 crc kubenswrapper[4712]: W0131 05:57:26.563501 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0f0cc18_6437_4c23_8ebd_f0a234fc72ff.slice/crio-aabef0fcf974113a4a88da09e19d436127ac1192ff0338b339f0508c8fe477c4 WatchSource:0}: Error finding container aabef0fcf974113a4a88da09e19d436127ac1192ff0338b339f0508c8fe477c4: Status 404 returned error can't find the container with id aabef0fcf974113a4a88da09e19d436127ac1192ff0338b339f0508c8fe477c4 Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.569727 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.578155 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484"] Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.627531 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc"] Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.722394 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wqtq8"] Jan 31 05:57:26 crc kubenswrapper[4712]: 
I0131 05:57:26.745514 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" event={"ID":"ddfcf8fb-8920-44fa-a439-ea5d5b6456f4","Type":"ContainerStarted","Data":"e6b1ebf0c60eca4094d23e4aeb07f77d41ed4bd463c47a1b11aa2efeed310442"} Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.747035 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.767009 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxjkw"] Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.786189 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" event={"ID":"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd","Type":"ContainerStarted","Data":"932f6525d3b58c249f2eeb0e9a7d09bbe3c55282839388b366436d58b7f1fae4"} Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.786582 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" podStartSLOduration=3.526641617 podStartE2EDuration="2m29.786563416s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.682994135 +0000 UTC m=+965.776875976" lastFinishedPulling="2026-01-31 05:57:25.942915934 +0000 UTC m=+1112.036797775" observedRunningTime="2026-01-31 05:57:26.780006717 +0000 UTC m=+1112.873888578" watchObservedRunningTime="2026-01-31 05:57:26.786563416 +0000 UTC m=+1112.880445247" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.823553 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" event={"ID":"b81b1954-214b-40b7-886d-3da110000383","Type":"ContainerStarted","Data":"be3d7713ada28e9db8a3255d75bdd48f539c3f7edd5218a5515f6323d305db1c"} Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.824889 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.860882 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" podStartSLOduration=3.360805349 podStartE2EDuration="2m29.860859426s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.436706827 +0000 UTC m=+965.530588668" lastFinishedPulling="2026-01-31 05:57:25.936760904 +0000 UTC m=+1112.030642745" observedRunningTime="2026-01-31 05:57:26.856629443 +0000 UTC m=+1112.950511284" watchObservedRunningTime="2026-01-31 05:57:26.860859426 +0000 UTC m=+1112.954741267" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.870516 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" event={"ID":"f7c522e9-e789-4fa5-9736-b6d921eba9e5","Type":"ContainerStarted","Data":"13b159a7d327d234074deec61fbaea4edb5e4a54b5331de36dad1e20d837ca44"} Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.873321 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 
05:57:26.904344 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" podStartSLOduration=3.6328372 podStartE2EDuration="2m29.904317408s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.671791464 +0000 UTC m=+965.765673305" lastFinishedPulling="2026-01-31 05:57:25.943271672 +0000 UTC m=+1112.037153513" observedRunningTime="2026-01-31 05:57:26.894580202 +0000 UTC m=+1112.988462043" watchObservedRunningTime="2026-01-31 05:57:26.904317408 +0000 UTC m=+1112.998199269" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.946608 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" event={"ID":"e48bf123-5e17-4ef3-980f-92286c95bd85","Type":"ContainerStarted","Data":"76936fffc1d395e18fb8dd9d75320e9e31f3999d0b647a57c5c8ac3c251ef10e"} Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.947384 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.961614 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" event={"ID":"ab883058-7fba-4506-8493-a1c290b67a44","Type":"ContainerStarted","Data":"6270ff4242d830ceba6f5b219b78d9c7ca7a834f990fa2a32f428cd73b6b5cb2"} Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.962464 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.984443 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" event={"ID":"0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a","Type":"ContainerStarted","Data":"87101ba08c4579f165f9dd84a28412ed00381a4ec81847ecf5caba49f20afe12"} Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.984772 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" Jan 31 05:57:26 crc kubenswrapper[4712]: I0131 05:57:26.985811 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" podStartSLOduration=3.088902203 podStartE2EDuration="2m29.985785681s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.040114792 +0000 UTC m=+965.133996633" lastFinishedPulling="2026-01-31 05:57:25.93699827 +0000 UTC m=+1112.030880111" observedRunningTime="2026-01-31 05:57:26.979573491 +0000 UTC m=+1113.073455332" watchObservedRunningTime="2026-01-31 05:57:26.985785681 +0000 UTC m=+1113.079667522" Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.001183 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" event={"ID":"2a9eefe0-c80b-479c-a630-4b94bea52b20","Type":"ContainerStarted","Data":"8540686989fc0de9c0dde08c7751d146e7029c8cf519a4086ae43dc0ec3dbfd4"} Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.001752 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.009578 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"0d66f63e01719bf8f3e02142623cdb63b4fc7cc229aaa57643fb27385d070e76"}
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.017621 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" podStartSLOduration=3.742231559 podStartE2EDuration="2m30.017598182s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.656005431 +0000 UTC m=+965.749887272" lastFinishedPulling="2026-01-31 05:57:25.931372054 +0000 UTC m=+1112.025253895" observedRunningTime="2026-01-31 05:57:27.013681977 +0000 UTC m=+1113.107563818" watchObservedRunningTime="2026-01-31 05:57:27.017598182 +0000 UTC m=+1113.111480023"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.026448 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5" event={"ID":"43cff116-70bd-4e43-a6a2-cffaebca6f11","Type":"ContainerStarted","Data":"4a209f037e6bafca1040679bcd33adf404b536c42783c95a714f3fd85bae23fa"}
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.027451 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.055462 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" event={"ID":"b348e7d3-8e8d-484d-bd03-b27125c4fd58","Type":"ContainerStarted","Data":"b423a40e2ecd5c7bbfd5582fd48de34d3d09be3014ee512d845affe83a6af11e"}
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.057694 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.085298 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" event={"ID":"84f27435-bc45-4501-8dda-59f399689054","Type":"ContainerStarted","Data":"1247ccf619ac144cbfc092b16a9f9c657a3576a6244a93053ef87526de8cea0e"}
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.110415 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" event={"ID":"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff","Type":"ContainerStarted","Data":"aabef0fcf974113a4a88da09e19d436127ac1192ff0338b339f0508c8fe477c4"}
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.128420 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" event={"ID":"c0a2fda3-cecc-40e9-b15e-2d95487c7373","Type":"ContainerStarted","Data":"f7eb4af3affc5e84afc2f5c5452ac525e198de4cbc50f1bc929765aa17396505"}
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.128935 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.132066 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" podStartSLOduration=7.958451235 podStartE2EDuration="2m30.132047164s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.656055792 +0000 UTC m=+965.749937633" lastFinishedPulling="2026-01-31 05:57:21.829651721 +0000 UTC m=+1107.923533562" observedRunningTime="2026-01-31 05:57:27.128720144 +0000 UTC m=+1113.222601985" watchObservedRunningTime="2026-01-31 05:57:27.132047164 +0000 UTC m=+1113.225929005"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.148860 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s" event={"ID":"e8223e38-a5ce-4f9f-9780-dea80a326f17","Type":"ContainerStarted","Data":"f93c42a01378d590394b475178cc0000e6192434657f47b80826c3fdcba27f52"}
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.149614 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.160876 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7" event={"ID":"df910b52-e35d-4099-abe9-676b2863ee90","Type":"ContainerStarted","Data":"b13bf0775be88bc940c78e083c8406f901702727f333be0265dd44fc3069eebf"}
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.161662 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.174845 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" podStartSLOduration=7.895182347 podStartE2EDuration="2m30.17482777s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.550788436 +0000 UTC m=+965.644670277" lastFinishedPulling="2026-01-31 05:57:21.830433859 +0000 UTC m=+1107.924315700" observedRunningTime="2026-01-31 05:57:27.17230879 +0000 UTC m=+1113.266190631" watchObservedRunningTime="2026-01-31 05:57:27.17482777 +0000 UTC m=+1113.268709611"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.210444 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" podStartSLOduration=3.757641268 podStartE2EDuration="2m30.210420153s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.491375349 +0000 UTC m=+965.585257190" lastFinishedPulling="2026-01-31 05:57:25.944154234 +0000 UTC m=+1112.038036075" observedRunningTime="2026-01-31 05:57:27.207128713 +0000 UTC m=+1113.301010554" watchObservedRunningTime="2026-01-31 05:57:27.210420153 +0000 UTC m=+1113.304301994"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.267041 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-74szb" podStartSLOduration=3.366022885 podStartE2EDuration="2m30.267018593s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.043156616 +0000 UTC m=+965.137038457" lastFinishedPulling="2026-01-31 05:57:25.944152324 +0000 UTC m=+1112.038034165" observedRunningTime="2026-01-31 05:57:27.265558388 +0000 UTC m=+1113.359440239" watchObservedRunningTime="2026-01-31 05:57:27.267018593 +0000 UTC m=+1113.360900434"
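The "SyncLoop (PLEG): event for pod" entries come from the kubelet's Pod Lifecycle Event Generator, which periodically relists containers in the runtime and turns observed state changes into events the sync loop consumes; each event pairs a pod UID with a container ID. An illustrative miniature of that event shape (field names mirror the log output above, not necessarily the exact types in pkg/kubelet/pleg):

package main

import "fmt"

type PodLifecycleEventType string

const (
	ContainerStarted PodLifecycleEventType = "ContainerStarted"
	ContainerDied    PodLifecycleEventType = "ContainerDied"
)

type PodLifecycleEvent struct {
	ID   string // pod UID
	Type PodLifecycleEventType
	Data string // container ID for started/died events
}

// handle is what a sync loop does with each event, in caricature: a started
// container triggers a pod sync and begins probing; a dead one records exit.
func handle(ev PodLifecycleEvent) {
	switch ev.Type {
	case ContainerStarted:
		fmt.Printf("pod %s: container %s started; sync pod and start probing\n", ev.ID, ev.Data)
	case ContainerDied:
		fmt.Printf("pod %s: container %s died; record exit code\n", ev.ID, ev.Data)
	}
}

func main() {
	handle(PodLifecycleEvent{ID: "ddfcf8fb-8920-44fa-a439-ea5d5b6456f4", Type: ContainerStarted, Data: "e6b1ebf0c60e"})
}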
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.306469 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7" podStartSLOduration=8.057717753 podStartE2EDuration="2m30.306447958s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.587061364 +0000 UTC m=+965.680943205" lastFinishedPulling="2026-01-31 05:57:21.835791559 +0000 UTC m=+1107.929673410" observedRunningTime="2026-01-31 05:57:27.305764652 +0000 UTC m=+1113.399646513" watchObservedRunningTime="2026-01-31 05:57:27.306447958 +0000 UTC m=+1113.400329799"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.412976 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5" podStartSLOduration=8.439893022 podStartE2EDuration="2m30.412939578s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.862583899 +0000 UTC m=+965.956465740" lastFinishedPulling="2026-01-31 05:57:21.835630455 +0000 UTC m=+1107.929512296" observedRunningTime="2026-01-31 05:57:27.366322408 +0000 UTC m=+1113.460204259" watchObservedRunningTime="2026-01-31 05:57:27.412939578 +0000 UTC m=+1113.506821419"
Jan 31 05:57:27 crc kubenswrapper[4712]: I0131 05:57:27.562201 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-wz82s" podStartSLOduration=120.241123919 podStartE2EDuration="2m30.562147111s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:58.807557115 +0000 UTC m=+964.901438956" lastFinishedPulling="2026-01-31 05:55:29.128580287 +0000 UTC m=+995.222462148" observedRunningTime="2026-01-31 05:57:27.413806738 +0000 UTC m=+1113.507688579" watchObservedRunningTime="2026-01-31 05:57:27.562147111 +0000 UTC m=+1113.656028952"
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.171197 4712 generic.go:334] "Generic (PLEG): container finished" podID="4061d25d-10d7-4107-b6e7-d776945214d7" containerID="aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0" exitCode=0
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.171295 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxjkw" event={"ID":"4061d25d-10d7-4107-b6e7-d776945214d7","Type":"ContainerDied","Data":"aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0"}
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.171588 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxjkw" event={"ID":"4061d25d-10d7-4107-b6e7-d776945214d7","Type":"ContainerStarted","Data":"d04d5b22b1cdab19b882538f49a2140ce30789c3fc01d142d8f8a70ee45cbf2b"}
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.174367 4712 generic.go:334] "Generic (PLEG): container finished" podID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerID="a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42" exitCode=0
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.174459 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqtq8" event={"ID":"564f7249-49ec-4608-9f4e-b61d993a8a0e","Type":"ContainerDied","Data":"a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42"}
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.174486 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqtq8" event={"ID":"564f7249-49ec-4608-9f4e-b61d993a8a0e","Type":"ContainerStarted","Data":"bc17feda981f50f91fd493ace83f093aee9427b37a4c8e04433c6fe0a170dea2"}
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.177265 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" event={"ID":"84f27435-bc45-4501-8dda-59f399689054","Type":"ContainerStarted","Data":"c578779b89c4e2ba99b4efb921061f4c81d0ef23f859c0dd87cae7c4cff74b89"}
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.178906 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc"
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.237038 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" event={"ID":"623f0661-5fd0-4c1c-94b8-7cb41dc60f5f","Type":"ContainerStarted","Data":"13f451a378196bde55855262ebf4cbeb3ce863a73227d83e0f3f62a8aec55fc4"}
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.237459 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv"
Jan 31 05:57:28 crc kubenswrapper[4712]: I0131 05:57:28.304483 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc" podStartSLOduration=150.30445866 podStartE2EDuration="2m30.30445866s" podCreationTimestamp="2026-01-31 05:54:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:57:28.298372252 +0000 UTC m=+1114.392254093" watchObservedRunningTime="2026-01-31 05:57:28.30445866 +0000 UTC m=+1114.398340501"
Jan 31 05:57:29 crc kubenswrapper[4712]: I0131 05:57:29.253598 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" event={"ID":"f94c7f2e-7429-4be0-bad9-f3cdf0156ba9","Type":"ContainerStarted","Data":"742e3de8cc1ef1cff0e834ef52bd5841d10165cc130042d5589823ddd70b8e99"}
Jan 31 05:57:29 crc kubenswrapper[4712]: I0131 05:57:29.255734 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg"
Jan 31 05:57:29 crc kubenswrapper[4712]: I0131 05:57:29.262571 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqtq8" event={"ID":"564f7249-49ec-4608-9f4e-b61d993a8a0e","Type":"ContainerStarted","Data":"ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0"}
Jan 31 05:57:29 crc kubenswrapper[4712]: I0131 05:57:29.281281 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" podStartSLOduration=3.539209568 podStartE2EDuration="2m32.281259578s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.521853147 +0000 UTC m=+965.615734998" lastFinishedPulling="2026-01-31 05:57:28.263903167 +0000 UTC m=+1114.357785008" observedRunningTime="2026-01-31 05:57:29.27099352 +0000 UTC m=+1115.364875361" watchObservedRunningTime="2026-01-31 05:57:29.281259578 +0000 UTC m=+1115.375141419"
Jan 31 05:57:29 crc kubenswrapper[4712]: I0131 05:57:29.284292 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" podStartSLOduration=5.827618244 podStartE2EDuration="2m32.284266851s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.480374043 +0000 UTC m=+965.574255884" lastFinishedPulling="2026-01-31 05:57:25.93702265 +0000 UTC m=+1112.030904491" observedRunningTime="2026-01-31 05:57:28.331520706 +0000 UTC m=+1114.425402537" watchObservedRunningTime="2026-01-31 05:57:29.284266851 +0000 UTC m=+1115.378148702"
Jan 31 05:57:30 crc kubenswrapper[4712]: I0131 05:57:30.271235 4712 generic.go:334] "Generic (PLEG): container finished" podID="4061d25d-10d7-4107-b6e7-d776945214d7" containerID="e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a" exitCode=0
Jan 31 05:57:30 crc kubenswrapper[4712]: I0131 05:57:30.271422 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxjkw" event={"ID":"4061d25d-10d7-4107-b6e7-d776945214d7","Type":"ContainerDied","Data":"e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a"}
Jan 31 05:57:30 crc kubenswrapper[4712]: I0131 05:57:30.284693 4712 generic.go:334] "Generic (PLEG): container finished" podID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerID="ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0" exitCode=0
Jan 31 05:57:30 crc kubenswrapper[4712]: I0131 05:57:30.284784 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqtq8" event={"ID":"564f7249-49ec-4608-9f4e-b61d993a8a0e","Type":"ContainerDied","Data":"ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0"}
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.301071 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" event={"ID":"cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd","Type":"ContainerStarted","Data":"a3f8994dabdfdfb309a3b760ca6d48d425f96c2293fb2ebc42b7e30d5d5c15fe"}
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.302999 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484"
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.305456 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxjkw" event={"ID":"4061d25d-10d7-4107-b6e7-d776945214d7","Type":"ContainerStarted","Data":"ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608"}
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.307875 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqtq8" event={"ID":"564f7249-49ec-4608-9f4e-b61d993a8a0e","Type":"ContainerStarted","Data":"908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11"}
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.309869 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" event={"ID":"d0f0cc18-6437-4c23-8ebd-f0a234fc72ff","Type":"ContainerStarted","Data":"34f11e9711d81fe6b5b8de43119013a7ed941b5bb205dd6aa90aa1ebfea87648"}
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.310333 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7"
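Every entry here carries the standard klog header: a severity letter fused with MMDD, a wall-clock time, the emitting PID, and source file:line, followed by a structured message. A small sketch for pulling those fields apart, with a regex written against this log's format only:

package main

import (
	"fmt"
	"regexp"
)

// klogLine matches headers like: I0131 05:57:26.745514 4712 kubelet.go:2453] "..."
var klogLine = regexp.MustCompile(`^([IWE])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+([\w_]+\.go:\d+)\] (.*)$`)

func main() {
	line := `I0131 05:57:26.745514 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768"`
	m := klogLine.FindStringSubmatch(line)
	if m == nil {
		panic("line does not match the klog header format")
	}
	// m[1]=severity, m[2]=MMDD, m[3]=time, m[4]=pid, m[5]=file:line, m[6]=message
	fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s\nmsg=%s\n", m[1], m[2], m[3], m[4], m[5], m[6])
}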
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.311587 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" event={"ID":"d7b48b1c-633e-4714-a9fe-0cdb81dc946d","Type":"ContainerStarted","Data":"e5dcc60d968fc14d85f9b12b1f0c418185c865e293ee217b99d31f2defc499cb"}
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.312428 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7"
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.344546 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" podStartSLOduration=150.613554347 podStartE2EDuration="2m35.34452217s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:57:26.627431913 +0000 UTC m=+1112.721313754" lastFinishedPulling="2026-01-31 05:57:31.358399736 +0000 UTC m=+1117.452281577" observedRunningTime="2026-01-31 05:57:32.3358313 +0000 UTC m=+1118.429713161" watchObservedRunningTime="2026-01-31 05:57:32.34452217 +0000 UTC m=+1118.438404001"
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.370380 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wqtq8" podStartSLOduration=141.160582454 podStartE2EDuration="2m24.370361616s" podCreationTimestamp="2026-01-31 05:55:08 +0000 UTC" firstStartedPulling="2026-01-31 05:57:28.227445524 +0000 UTC m=+1114.321327365" lastFinishedPulling="2026-01-31 05:57:31.437224686 +0000 UTC m=+1117.531106527" observedRunningTime="2026-01-31 05:57:32.363543411 +0000 UTC m=+1118.457425252" watchObservedRunningTime="2026-01-31 05:57:32.370361616 +0000 UTC m=+1118.464243457"
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.385636 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gxjkw" podStartSLOduration=135.199584318 podStartE2EDuration="2m18.385612255s" podCreationTimestamp="2026-01-31 05:55:14 +0000 UTC" firstStartedPulling="2026-01-31 05:57:28.255439532 +0000 UTC m=+1114.349321373" lastFinishedPulling="2026-01-31 05:57:31.441467469 +0000 UTC m=+1117.535349310" observedRunningTime="2026-01-31 05:57:32.382710055 +0000 UTC m=+1118.476591906" watchObservedRunningTime="2026-01-31 05:57:32.385612255 +0000 UTC m=+1118.479494116"
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.424278 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" podStartSLOduration=150.565341109 podStartE2EDuration="2m35.424261022s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:57:26.569416227 +0000 UTC m=+1112.663298068" lastFinishedPulling="2026-01-31 05:57:31.42833615 +0000 UTC m=+1117.522217981" observedRunningTime="2026-01-31 05:57:32.424148289 +0000 UTC m=+1118.518030130" watchObservedRunningTime="2026-01-31 05:57:32.424261022 +0000 UTC m=+1118.518142873"
Jan 31 05:57:32 crc kubenswrapper[4712]: I0131 05:57:32.424789 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" podStartSLOduration=3.7161941670000003 podStartE2EDuration="2m35.424782144s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.73032602 +0000 UTC m=+965.824207861" lastFinishedPulling="2026-01-31 05:57:31.438913997 +0000 UTC m=+1117.532795838" observedRunningTime="2026-01-31 05:57:32.406807529 +0000 UTC m=+1118.500689370" watchObservedRunningTime="2026-01-31 05:57:32.424782144 +0000 UTC m=+1118.518663985"
Jan 31 05:57:34 crc kubenswrapper[4712]: I0131 05:57:34.724072 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6b6f655c79-fdkzc"
Jan 31 05:57:35 crc kubenswrapper[4712]: I0131 05:57:35.168918 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gxjkw"
Jan 31 05:57:35 crc kubenswrapper[4712]: I0131 05:57:35.169358 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gxjkw"
Jan 31 05:57:35 crc kubenswrapper[4712]: I0131 05:57:35.217778 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gxjkw"
Jan 31 05:57:35 crc kubenswrapper[4712]: E0131 05:57:35.506584 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f9bf288cd0c13912404027a58ea3b90d4092b641e8265adc5c88644ea7fe901a\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" podUID="a0b07b7e-8267-4062-8cf1-9319d4258d13"
Jan 31 05:57:35 crc kubenswrapper[4712]: E0131 05:57:35.506737 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" podUID="76f477c9-248d-45e0-acdc-098fd960378c"
Jan 31 05:57:37 crc kubenswrapper[4712]: E0131 05:57:37.506971 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" podUID="6d0bc1fd-d786-402b-a7b0-4f31066900f9"
Jan 31 05:57:37 crc kubenswrapper[4712]: E0131 05:57:37.507122 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e\\\"\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" podUID="46eafe76-b842-4889-98b5-eae45c6c9a70"
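The E-level "Error syncing pod" entries above show four operator pods in ImagePullBackOff: the kubelet declines to retry the pull immediately and waits out a back-off delay instead. Assuming the usual kubelet defaults (roughly a 10s initial delay doubling to a 5-minute cap; these are assumptions, not read from this node's config), the retry schedule looks like:

package main

import (
	"fmt"
	"time"
)

// backoffSchedule returns the first n delays of a doubling-with-cap back-off,
// the pattern behind repeated "Back-off pulling image" messages.
func backoffSchedule(initial, max time.Duration, n int) []time.Duration {
	out := make([]time.Duration, 0, n)
	d := initial
	for i := 0; i < n; i++ {
		out = append(out, d)
		d *= 2
		if d > max {
			d = max
		}
	}
	return out
}

func main() {
	fmt.Println(backoffSchedule(10*time.Second, 300*time.Second, 7))
	// [10s 20s 40s 1m20s 2m40s 5m0s 5m0s]
}

An early-schedule retry is consistent with what follows: these pulls back off at 05:57:35-05:57:37 and the containers finally start at 05:57:57-05:57:59.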
pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-vmvmz" Jan 31 05:57:37 crc kubenswrapper[4712]: I0131 05:57:37.715918 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-z96q2" Jan 31 05:57:37 crc kubenswrapper[4712]: I0131 05:57:37.764227 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" Jan 31 05:57:37 crc kubenswrapper[4712]: I0131 05:57:37.821618 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-7x6wg" Jan 31 05:57:37 crc kubenswrapper[4712]: I0131 05:57:37.894699 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-9djc7" Jan 31 05:57:37 crc kubenswrapper[4712]: I0131 05:57:37.923946 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2zprf" Jan 31 05:57:38 crc kubenswrapper[4712]: I0131 05:57:38.015702 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-n9768" Jan 31 05:57:38 crc kubenswrapper[4712]: I0131 05:57:38.101254 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-czjfw" Jan 31 05:57:38 crc kubenswrapper[4712]: I0131 05:57:38.122149 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-5vrvb" Jan 31 05:57:38 crc kubenswrapper[4712]: I0131 05:57:38.576759 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-2fg8n" Jan 31 05:57:38 crc kubenswrapper[4712]: I0131 05:57:38.637887 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-kcq2k" Jan 31 05:57:38 crc kubenswrapper[4712]: I0131 05:57:38.639620 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-pfsg7" Jan 31 05:57:38 crc kubenswrapper[4712]: I0131 05:57:38.794260 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-564965969-p7ht5" Jan 31 05:57:38 crc kubenswrapper[4712]: I0131 05:57:38.985874 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:57:38 crc kubenswrapper[4712]: I0131 05:57:38.986276 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:57:39 crc kubenswrapper[4712]: I0131 05:57:39.036544 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:57:39 crc kubenswrapper[4712]: I0131 05:57:39.407009 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:57:39 crc kubenswrapper[4712]: I0131 05:57:39.455970 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-wqtq8"] Jan 31 05:57:40 crc kubenswrapper[4712]: E0131 05:57:40.506272 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" podUID="4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a" Jan 31 05:57:41 crc kubenswrapper[4712]: I0131 05:57:41.379311 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wqtq8" podUID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerName="registry-server" containerID="cri-o://908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11" gracePeriod=2 Jan 31 05:57:43 crc kubenswrapper[4712]: I0131 05:57:43.476515 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79955696d6-pdzn7" Jan 31 05:57:44 crc kubenswrapper[4712]: I0131 05:57:44.163974 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86dfb79cc789484" Jan 31 05:57:44 crc kubenswrapper[4712]: I0131 05:57:44.973438 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.138452 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-catalog-content\") pod \"564f7249-49ec-4608-9f4e-b61d993a8a0e\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.138571 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m86w5\" (UniqueName: \"kubernetes.io/projected/564f7249-49ec-4608-9f4e-b61d993a8a0e-kube-api-access-m86w5\") pod \"564f7249-49ec-4608-9f4e-b61d993a8a0e\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.138632 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-utilities\") pod \"564f7249-49ec-4608-9f4e-b61d993a8a0e\" (UID: \"564f7249-49ec-4608-9f4e-b61d993a8a0e\") " Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.140011 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-utilities" (OuterVolumeSpecName: "utilities") pod "564f7249-49ec-4608-9f4e-b61d993a8a0e" (UID: "564f7249-49ec-4608-9f4e-b61d993a8a0e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.151429 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/564f7249-49ec-4608-9f4e-b61d993a8a0e-kube-api-access-m86w5" (OuterVolumeSpecName: "kube-api-access-m86w5") pod "564f7249-49ec-4608-9f4e-b61d993a8a0e" (UID: "564f7249-49ec-4608-9f4e-b61d993a8a0e"). InnerVolumeSpecName "kube-api-access-m86w5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.197483 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "564f7249-49ec-4608-9f4e-b61d993a8a0e" (UID: "564f7249-49ec-4608-9f4e-b61d993a8a0e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.216046 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.240901 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m86w5\" (UniqueName: \"kubernetes.io/projected/564f7249-49ec-4608-9f4e-b61d993a8a0e-kube-api-access-m86w5\") on node \"crc\" DevicePath \"\"" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.241030 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.241045 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564f7249-49ec-4608-9f4e-b61d993a8a0e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.267633 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxjkw"] Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.410488 4712 generic.go:334] "Generic (PLEG): container finished" podID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerID="908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11" exitCode=0 Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.410562 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wqtq8" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.410574 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqtq8" event={"ID":"564f7249-49ec-4608-9f4e-b61d993a8a0e","Type":"ContainerDied","Data":"908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11"} Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.410613 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqtq8" event={"ID":"564f7249-49ec-4608-9f4e-b61d993a8a0e","Type":"ContainerDied","Data":"bc17feda981f50f91fd493ace83f093aee9427b37a4c8e04433c6fe0a170dea2"} Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.410634 4712 scope.go:117] "RemoveContainer" containerID="908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.411443 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gxjkw" podUID="4061d25d-10d7-4107-b6e7-d776945214d7" containerName="registry-server" containerID="cri-o://ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608" gracePeriod=2 Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.430997 4712 scope.go:117] "RemoveContainer" containerID="ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.457844 4712 scope.go:117] "RemoveContainer" containerID="a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.459085 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wqtq8"] Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.465700 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wqtq8"] Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.473993 4712 scope.go:117] "RemoveContainer" containerID="908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11" Jan 31 05:57:45 crc kubenswrapper[4712]: E0131 05:57:45.474621 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11\": container with ID starting with 908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11 not found: ID does not exist" containerID="908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.474659 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11"} err="failed to get container status \"908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11\": rpc error: code = NotFound desc = could not find container \"908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11\": container with ID starting with 908fae88ddc68f76c78bc148a8788f86ea6d806dcd5e26c75b3ef4cdee849c11 not found: ID does not exist" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.474691 4712 scope.go:117] "RemoveContainer" containerID="ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0" Jan 31 05:57:45 crc kubenswrapper[4712]: E0131 05:57:45.474965 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0\": container with ID starting with ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0 not found: ID does not exist" containerID="ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.474992 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0"} err="failed to get container status \"ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0\": rpc error: code = NotFound desc = could not find container \"ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0\": container with ID starting with ba5b582ca467edc84ef5ba48a1a850daa96a8d3ee669aa64c0bca316b73a76f0 not found: ID does not exist" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.475010 4712 scope.go:117] "RemoveContainer" containerID="a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42" Jan 31 05:57:45 crc kubenswrapper[4712]: E0131 05:57:45.475291 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42\": container with ID starting with a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42 not found: ID does not exist" containerID="a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42" Jan 31 05:57:45 crc kubenswrapper[4712]: I0131 05:57:45.475319 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42"} err="failed to get container status \"a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42\": rpc error: code = NotFound desc = could not find container \"a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42\": container with ID starting with a165f8617cef1605aa1e9fe494d63e8997866673f25daae514acdf76e9b8ce42 not found: ID does not exist" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.405162 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.419982 4712 generic.go:334] "Generic (PLEG): container finished" podID="4061d25d-10d7-4107-b6e7-d776945214d7" containerID="ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608" exitCode=0 Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.420048 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxjkw" event={"ID":"4061d25d-10d7-4107-b6e7-d776945214d7","Type":"ContainerDied","Data":"ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608"} Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.420079 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gxjkw" event={"ID":"4061d25d-10d7-4107-b6e7-d776945214d7","Type":"ContainerDied","Data":"d04d5b22b1cdab19b882538f49a2140ce30789c3fc01d142d8f8a70ee45cbf2b"} Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.420097 4712 scope.go:117] "RemoveContainer" containerID="ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.420230 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gxjkw" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.444588 4712 scope.go:117] "RemoveContainer" containerID="e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.467681 4712 scope.go:117] "RemoveContainer" containerID="aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.492354 4712 scope.go:117] "RemoveContainer" containerID="ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608" Jan 31 05:57:46 crc kubenswrapper[4712]: E0131 05:57:46.492839 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608\": container with ID starting with ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608 not found: ID does not exist" containerID="ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.492874 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608"} err="failed to get container status \"ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608\": rpc error: code = NotFound desc = could not find container \"ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608\": container with ID starting with ff487fcefbc2ea237026a66b8aea8ac8d537aaf398d2c11034b44197cd644608 not found: ID does not exist" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.492900 4712 scope.go:117] "RemoveContainer" containerID="e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a" Jan 31 05:57:46 crc kubenswrapper[4712]: E0131 05:57:46.493252 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a\": container with ID starting with e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a not found: ID does not exist" containerID="e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.493276 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a"} err="failed to get container status \"e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a\": rpc error: code = NotFound desc = could not find container \"e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a\": container with ID starting with e80d91abd20b03cf49759acb2aa2cd349e9fceb5e9803b555c6423d6b2a5d00a not found: ID does not exist" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.493293 4712 scope.go:117] "RemoveContainer" containerID="aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0" Jan 31 05:57:46 crc kubenswrapper[4712]: E0131 05:57:46.493778 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0\": container with ID starting with aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0 not found: ID does not exist" containerID="aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0" 
Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.493798 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0"} err="failed to get container status \"aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0\": rpc error: code = NotFound desc = could not find container \"aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0\": container with ID starting with aacea29d037a339d26f8ecee69072a1a7a1f758988e3e240c2517a1f190e27b0 not found: ID does not exist" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.521859 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="564f7249-49ec-4608-9f4e-b61d993a8a0e" path="/var/lib/kubelet/pods/564f7249-49ec-4608-9f4e-b61d993a8a0e/volumes" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.569929 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-catalog-content\") pod \"4061d25d-10d7-4107-b6e7-d776945214d7\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.570442 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zbwg\" (UniqueName: \"kubernetes.io/projected/4061d25d-10d7-4107-b6e7-d776945214d7-kube-api-access-8zbwg\") pod \"4061d25d-10d7-4107-b6e7-d776945214d7\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.570505 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-utilities\") pod \"4061d25d-10d7-4107-b6e7-d776945214d7\" (UID: \"4061d25d-10d7-4107-b6e7-d776945214d7\") " Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.572991 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-utilities" (OuterVolumeSpecName: "utilities") pod "4061d25d-10d7-4107-b6e7-d776945214d7" (UID: "4061d25d-10d7-4107-b6e7-d776945214d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.578355 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4061d25d-10d7-4107-b6e7-d776945214d7-kube-api-access-8zbwg" (OuterVolumeSpecName: "kube-api-access-8zbwg") pod "4061d25d-10d7-4107-b6e7-d776945214d7" (UID: "4061d25d-10d7-4107-b6e7-d776945214d7"). InnerVolumeSpecName "kube-api-access-8zbwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.595635 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4061d25d-10d7-4107-b6e7-d776945214d7" (UID: "4061d25d-10d7-4107-b6e7-d776945214d7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.672460 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.672496 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zbwg\" (UniqueName: \"kubernetes.io/projected/4061d25d-10d7-4107-b6e7-d776945214d7-kube-api-access-8zbwg\") on node \"crc\" DevicePath \"\"" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.672511 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4061d25d-10d7-4107-b6e7-d776945214d7-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.761931 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxjkw"] Jan 31 05:57:46 crc kubenswrapper[4712]: I0131 05:57:46.767379 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gxjkw"] Jan 31 05:57:48 crc kubenswrapper[4712]: I0131 05:57:48.513647 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4061d25d-10d7-4107-b6e7-d776945214d7" path="/var/lib/kubelet/pods/4061d25d-10d7-4107-b6e7-d776945214d7/volumes" Jan 31 05:57:57 crc kubenswrapper[4712]: I0131 05:57:57.544924 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" event={"ID":"76f477c9-248d-45e0-acdc-098fd960378c","Type":"ContainerStarted","Data":"070da9394aa083f8ecac05c428054b918f7a8be6e50399e9c5a32cdd5bbd4034"} Jan 31 05:57:57 crc kubenswrapper[4712]: I0131 05:57:57.545845 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" Jan 31 05:57:57 crc kubenswrapper[4712]: I0131 05:57:57.567747 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" podStartSLOduration=3.431774199 podStartE2EDuration="3m0.567717358s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.730646988 +0000 UTC m=+965.824528829" lastFinishedPulling="2026-01-31 05:57:56.866590107 +0000 UTC m=+1142.960471988" observedRunningTime="2026-01-31 05:57:57.559987861 +0000 UTC m=+1143.653869732" watchObservedRunningTime="2026-01-31 05:57:57.567717358 +0000 UTC m=+1143.661599199" Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.562200 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" event={"ID":"4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a","Type":"ContainerStarted","Data":"ebb70b478d1b75215301839eb78fe1184088383f0bf0cc7c9774a0bdbc2e7e7a"} Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.564022 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" event={"ID":"46eafe76-b842-4889-98b5-eae45c6c9a70","Type":"ContainerStarted","Data":"28086f207aa1758cf377efd3627890bbc2c40ea364c3569d240465720d7f1621"} Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.566009 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" event={"ID":"6d0bc1fd-d786-402b-a7b0-4f31066900f9","Type":"ContainerStarted","Data":"abfb0de185d50faca8cf92d74534b72115324e6c69ce628ccfa1c65d2e3b0d57"} Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.566244 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.568547 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" event={"ID":"a0b07b7e-8267-4062-8cf1-9319d4258d13","Type":"ContainerStarted","Data":"423815906ec98bb7fa848d4f56df14d8b554611c1885f95305ebda2212e8c90a"} Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.568774 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.615775 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-nr98t" podStartSLOduration=2.425210477 podStartE2EDuration="3m1.615751721s" podCreationTimestamp="2026-01-31 05:54:58 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.807678241 +0000 UTC m=+965.901560082" lastFinishedPulling="2026-01-31 05:57:58.998219475 +0000 UTC m=+1145.092101326" observedRunningTime="2026-01-31 05:57:59.58058505 +0000 UTC m=+1145.674466891" watchObservedRunningTime="2026-01-31 05:57:59.615751721 +0000 UTC m=+1145.709633572" Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.638839 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" podStartSLOduration=3.664711816 podStartE2EDuration="3m2.6388136s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.865336156 +0000 UTC m=+965.959217997" lastFinishedPulling="2026-01-31 05:57:58.83943793 +0000 UTC m=+1144.933319781" observedRunningTime="2026-01-31 05:57:59.635833528 +0000 UTC m=+1145.729715369" watchObservedRunningTime="2026-01-31 05:57:59.6388136 +0000 UTC m=+1145.732695441" Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.647364 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" podStartSLOduration=3.543921004 podStartE2EDuration="3m2.647336457s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.737150394 +0000 UTC m=+965.831032235" lastFinishedPulling="2026-01-31 05:57:58.840565847 +0000 UTC m=+1144.934447688" observedRunningTime="2026-01-31 05:57:59.61235822 +0000 UTC m=+1145.706240081" watchObservedRunningTime="2026-01-31 05:57:59.647336457 +0000 UTC m=+1145.741218298" Jan 31 05:57:59 crc kubenswrapper[4712]: I0131 05:57:59.670494 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" podStartSLOduration=3.637488839 podStartE2EDuration="3m2.670462166s" podCreationTimestamp="2026-01-31 05:54:57 +0000 UTC" firstStartedPulling="2026-01-31 05:54:59.807293612 +0000 UTC m=+965.901175453" lastFinishedPulling="2026-01-31 05:57:58.840266889 +0000 UTC m=+1144.934148780" observedRunningTime="2026-01-31 05:57:59.66359327 +0000 UTC m=+1145.757475111" 
watchObservedRunningTime="2026-01-31 05:57:59.670462166 +0000 UTC m=+1145.764344007" Jan 31 05:58:08 crc kubenswrapper[4712]: I0131 05:58:08.137740 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" Jan 31 05:58:08 crc kubenswrapper[4712]: I0131 05:58:08.142732 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-qwh9v" Jan 31 05:58:08 crc kubenswrapper[4712]: I0131 05:58:08.343658 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-f9sm2" Jan 31 05:58:08 crc kubenswrapper[4712]: I0131 05:58:08.732076 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-msxfl" Jan 31 05:58:08 crc kubenswrapper[4712]: I0131 05:58:08.758861 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-2wkdr" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.621491 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5679d46597-szdrh"] Jan 31 05:58:26 crc kubenswrapper[4712]: E0131 05:58:26.623428 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4061d25d-10d7-4107-b6e7-d776945214d7" containerName="registry-server" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623450 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="4061d25d-10d7-4107-b6e7-d776945214d7" containerName="registry-server" Jan 31 05:58:26 crc kubenswrapper[4712]: E0131 05:58:26.623464 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="extract-content" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623471 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="extract-content" Jan 31 05:58:26 crc kubenswrapper[4712]: E0131 05:58:26.623490 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4061d25d-10d7-4107-b6e7-d776945214d7" containerName="extract-content" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623496 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="4061d25d-10d7-4107-b6e7-d776945214d7" containerName="extract-content" Jan 31 05:58:26 crc kubenswrapper[4712]: E0131 05:58:26.623510 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerName="extract-content" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623516 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerName="extract-content" Jan 31 05:58:26 crc kubenswrapper[4712]: E0131 05:58:26.623525 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerName="registry-server" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623532 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerName="registry-server" Jan 31 05:58:26 crc kubenswrapper[4712]: E0131 05:58:26.623542 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 
05:58:26.623548 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:58:26 crc kubenswrapper[4712]: E0131 05:58:26.623559 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="extract-utilities" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623565 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="extract-utilities" Jan 31 05:58:26 crc kubenswrapper[4712]: E0131 05:58:26.623572 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4061d25d-10d7-4107-b6e7-d776945214d7" containerName="extract-utilities" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623577 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="4061d25d-10d7-4107-b6e7-d776945214d7" containerName="extract-utilities" Jan 31 05:58:26 crc kubenswrapper[4712]: E0131 05:58:26.623586 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerName="extract-utilities" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623592 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerName="extract-utilities" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623729 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1639c9ad-583d-4947-92fa-24d85dd1e2a2" containerName="registry-server" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623742 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="564f7249-49ec-4608-9f4e-b61d993a8a0e" containerName="registry-server" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.623750 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="4061d25d-10d7-4107-b6e7-d776945214d7" containerName="registry-server" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.624505 4712 util.go:30] "No sandbox for pod can be found. 
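Before admitting the new dnsmasq pod, the CPU and memory managers purge checkpointed state left behind by containers of pods that no longer exist: here the extract-utilities, extract-content, and registry-server containers of three removed catalog pods. Each purge is a pair of lines, an E-level cpu_manager.go:410 "RemoveStaleState: removing container" followed by an I-level state_mem.go:107 "Deleted CPUSet assignment" (memory_manager.go:354 does the same for memory state). A hedged sketch of the pattern, with hypothetical types:

    // Drop per-container resource assignments whose pod UID is no longer
    // in the active set. Types and names here are illustrative only.
    type containerKey struct{ podUID, containerName string }

    type cpuState struct {
        assignments map[containerKey]string // e.g. "0-3" for a CPUSet
    }

    func (s *cpuState) removeStaleState(activePods map[string]bool) {
        for key := range s.assignments {
            if !activePods[key.podUID] {
                // Mirrors the paired log lines: log the removal, then
                // delete the CPUSet assignment from the checkpoint.
                delete(s.assignments, key)
            }
        }
    }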
Need to start a new one" pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.627645 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.627719 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-pxprt" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.627991 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.629804 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.684437 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5679d46597-szdrh"] Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.720782 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3806d7-1bf1-411d-bfb3-b488b81963ca-config\") pod \"dnsmasq-dns-5679d46597-szdrh\" (UID: \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\") " pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.720873 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fh8d\" (UniqueName: \"kubernetes.io/projected/cc3806d7-1bf1-411d-bfb3-b488b81963ca-kube-api-access-7fh8d\") pod \"dnsmasq-dns-5679d46597-szdrh\" (UID: \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\") " pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.729867 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5"] Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.731029 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.737741 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.752002 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5"] Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.822066 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljw7n\" (UniqueName: \"kubernetes.io/projected/d87d95b3-0f23-4c0d-8d97-750ae9379acd-kube-api-access-ljw7n\") pod \"dnsmasq-dns-7b7dbc5f8f-bm6z5\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.822122 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3806d7-1bf1-411d-bfb3-b488b81963ca-config\") pod \"dnsmasq-dns-5679d46597-szdrh\" (UID: \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\") " pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.822230 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-dns-svc\") pod \"dnsmasq-dns-7b7dbc5f8f-bm6z5\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.822253 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fh8d\" (UniqueName: \"kubernetes.io/projected/cc3806d7-1bf1-411d-bfb3-b488b81963ca-kube-api-access-7fh8d\") pod \"dnsmasq-dns-5679d46597-szdrh\" (UID: \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\") " pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.822271 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-config\") pod \"dnsmasq-dns-7b7dbc5f8f-bm6z5\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.823060 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3806d7-1bf1-411d-bfb3-b488b81963ca-config\") pod \"dnsmasq-dns-5679d46597-szdrh\" (UID: \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\") " pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.846201 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fh8d\" (UniqueName: \"kubernetes.io/projected/cc3806d7-1bf1-411d-bfb3-b488b81963ca-kube-api-access-7fh8d\") pod \"dnsmasq-dns-5679d46597-szdrh\" (UID: \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\") " pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.923603 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-dns-svc\") pod \"dnsmasq-dns-7b7dbc5f8f-bm6z5\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 
05:58:26.923650 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-config\") pod \"dnsmasq-dns-7b7dbc5f8f-bm6z5\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.923689 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljw7n\" (UniqueName: \"kubernetes.io/projected/d87d95b3-0f23-4c0d-8d97-750ae9379acd-kube-api-access-ljw7n\") pod \"dnsmasq-dns-7b7dbc5f8f-bm6z5\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.924665 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-dns-svc\") pod \"dnsmasq-dns-7b7dbc5f8f-bm6z5\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.924817 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-config\") pod \"dnsmasq-dns-7b7dbc5f8f-bm6z5\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.942906 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljw7n\" (UniqueName: \"kubernetes.io/projected/d87d95b3-0f23-4c0d-8d97-750ae9379acd-kube-api-access-ljw7n\") pod \"dnsmasq-dns-7b7dbc5f8f-bm6z5\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:26 crc kubenswrapper[4712]: I0131 05:58:26.946074 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:58:27 crc kubenswrapper[4712]: I0131 05:58:27.056064 4712 util.go:30] "No sandbox for pod can be found. 
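Each volume above moves through the same three reconciler steps in order: operationExecutor.VerifyControllerAttachedVolume (reconciler_common.go:245), operationExecutor.MountVolume started (reconciler_common.go:218), and MountVolume.SetUp succeeded (operation_generator.go:637); only once every volume of a pod reports SetUp does sandbox creation proceed. A hedged sketch of the sequence (the real reconciler runs these asynchronously per volume and retries failures on the next sync loop):

    // Illustrative only; function names are hypothetical stand-ins for
    // the operation-executor steps named in the log lines above.
    func mountPodVolumes(podUID string, volumes []string) error {
        for _, v := range volumes {
            if err := verifyControllerAttachedVolume(podUID, v); err != nil {
                return err // volume stays in the desired state of world; retried
            }
            if err := mountVolumeSetUp(podUID, v); err != nil {
                return err
            }
            // Success marks the volume mounted in the actual state of
            // world and emits "MountVolume.SetUp succeeded".
        }
        return nil
    }

    func verifyControllerAttachedVolume(podUID, vol string) error { return nil } // stub
    func mountVolumeSetUp(podUID, vol string) error { return nil }               // stub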
Need to start a new one" pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:58:27 crc kubenswrapper[4712]: I0131 05:58:27.508888 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5679d46597-szdrh"] Jan 31 05:58:27 crc kubenswrapper[4712]: I0131 05:58:27.594757 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5"] Jan 31 05:58:27 crc kubenswrapper[4712]: W0131 05:58:27.596922 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd87d95b3_0f23_4c0d_8d97_750ae9379acd.slice/crio-60d2def02ddcba8e5ec846576cfd166a78f7be780e8a9fc687a089abeb10d48c WatchSource:0}: Error finding container 60d2def02ddcba8e5ec846576cfd166a78f7be780e8a9fc687a089abeb10d48c: Status 404 returned error can't find the container with id 60d2def02ddcba8e5ec846576cfd166a78f7be780e8a9fc687a089abeb10d48c Jan 31 05:58:27 crc kubenswrapper[4712]: I0131 05:58:27.788509 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" event={"ID":"d87d95b3-0f23-4c0d-8d97-750ae9379acd","Type":"ContainerStarted","Data":"60d2def02ddcba8e5ec846576cfd166a78f7be780e8a9fc687a089abeb10d48c"} Jan 31 05:58:27 crc kubenswrapper[4712]: I0131 05:58:27.789958 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5679d46597-szdrh" event={"ID":"cc3806d7-1bf1-411d-bfb3-b488b81963ca","Type":"ContainerStarted","Data":"2f2fa4f2bfc2a35f7bd4d5231ef9b58669b77cec07483e9a2fd8903333005137"} Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.601721 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5679d46597-szdrh"] Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.654591 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57958b85c7-pkl69"] Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.655929 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.701723 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57958b85c7-pkl69"] Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.779445 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-config\") pod \"dnsmasq-dns-57958b85c7-pkl69\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.779502 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-dns-svc\") pod \"dnsmasq-dns-57958b85c7-pkl69\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.779528 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnkql\" (UniqueName: \"kubernetes.io/projected/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-kube-api-access-pnkql\") pod \"dnsmasq-dns-57958b85c7-pkl69\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.885193 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-config\") pod \"dnsmasq-dns-57958b85c7-pkl69\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.885260 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-dns-svc\") pod \"dnsmasq-dns-57958b85c7-pkl69\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.886261 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-config\") pod \"dnsmasq-dns-57958b85c7-pkl69\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.886325 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-dns-svc\") pod \"dnsmasq-dns-57958b85c7-pkl69\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.885296 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnkql\" (UniqueName: \"kubernetes.io/projected/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-kube-api-access-pnkql\") pod \"dnsmasq-dns-57958b85c7-pkl69\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.926088 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5"] Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.941640 
4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnkql\" (UniqueName: \"kubernetes.io/projected/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-kube-api-access-pnkql\") pod \"dnsmasq-dns-57958b85c7-pkl69\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.976230 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d677d9b95-l5l5s"] Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.978626 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.985363 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d677d9b95-l5l5s"] Jan 31 05:58:29 crc kubenswrapper[4712]: I0131 05:58:29.992223 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.099041 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-dns-svc\") pod \"dnsmasq-dns-5d677d9b95-l5l5s\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.099117 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9gvk\" (UniqueName: \"kubernetes.io/projected/0bedfe15-450f-400f-8b12-0355d274a9c0-kube-api-access-l9gvk\") pod \"dnsmasq-dns-5d677d9b95-l5l5s\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.099198 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-config\") pod \"dnsmasq-dns-5d677d9b95-l5l5s\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.201826 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-dns-svc\") pod \"dnsmasq-dns-5d677d9b95-l5l5s\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.201918 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9gvk\" (UniqueName: \"kubernetes.io/projected/0bedfe15-450f-400f-8b12-0355d274a9c0-kube-api-access-l9gvk\") pod \"dnsmasq-dns-5d677d9b95-l5l5s\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.201988 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-config\") pod \"dnsmasq-dns-5d677d9b95-l5l5s\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.205367 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-config\") pod \"dnsmasq-dns-5d677d9b95-l5l5s\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.205391 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-dns-svc\") pod \"dnsmasq-dns-5d677d9b95-l5l5s\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.233583 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9gvk\" (UniqueName: \"kubernetes.io/projected/0bedfe15-450f-400f-8b12-0355d274a9c0-kube-api-access-l9gvk\") pod \"dnsmasq-dns-5d677d9b95-l5l5s\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.323612 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.644258 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57958b85c7-pkl69"] Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.722067 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.724865 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.728160 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.728378 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.728529 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.729257 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.729405 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jgrnb" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.729488 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.734715 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.760884 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.831683 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57958b85c7-pkl69" event={"ID":"6deb14bc-bbe7-453b-9fb6-ec88ba84d463","Type":"ContainerStarted","Data":"b4807d66b3ba62ac05ca0ed3570792d487a5c9959bc146c75a0a4860858533d2"} Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.865755 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d677d9b95-l5l5s"] Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915061 4712 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915124 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915232 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915258 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915302 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915356 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915393 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915416 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zvvh\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-kube-api-access-7zvvh\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915438 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915464 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-config-data\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:30 crc kubenswrapper[4712]: I0131 05:58:30.915487 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017098 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017159 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017233 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017253 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zvvh\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-kube-api-access-7zvvh\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017273 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017294 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-config-data\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017313 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017345 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: 
I0131 05:58:31.017362 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017407 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.017425 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.019118 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.018749 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.019771 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.020191 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.021012 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.021135 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-config-data\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.025799 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: 
\"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.025929 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.026571 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.040476 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.049631 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zvvh\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-kube-api-access-7zvvh\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.093388 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.095201 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.098301 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.098544 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.098597 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.098554 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-fq9qc" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.098864 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.099074 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.099344 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.100141 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.102296 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.230684 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76dch\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-kube-api-access-76dch\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.230770 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.230804 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.230830 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.230862 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.230890 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.230921 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.230958 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.230994 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.231026 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.231047 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.332610 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.332662 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.332718 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.332766 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.332793 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.332814 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.332842 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.332868 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.333287 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.333320 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.333124 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.333340 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.333380 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76dch\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-kube-api-access-76dch\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.333579 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.333901 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.335550 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.337676 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.347433 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.347935 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.348233 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.348652 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.351769 4712 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-76dch\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-kube-api-access-76dch\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.371098 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.386933 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.497833 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.842493 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" event={"ID":"0bedfe15-450f-400f-8b12-0355d274a9c0","Type":"ContainerStarted","Data":"ecb77a7620f434f0b39bafd3b6e1f696c76cd3319d10078e1073461666c09887"} Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.987605 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.997094 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 31 05:58:31 crc kubenswrapper[4712]: I0131 05:58:31.998744 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.003809 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.003855 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.004133 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-bv49w" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.004339 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.014578 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.019386 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.092942 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.160394 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c54792d1-2de9-4c85-a843-35d4b14dd8e4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.160438 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5bwz\" (UniqueName: 
\"kubernetes.io/projected/c54792d1-2de9-4c85-a843-35d4b14dd8e4-kube-api-access-v5bwz\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.160508 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c54792d1-2de9-4c85-a843-35d4b14dd8e4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.160528 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c54792d1-2de9-4c85-a843-35d4b14dd8e4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.160546 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c54792d1-2de9-4c85-a843-35d4b14dd8e4-config-data-default\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.160609 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.160629 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c54792d1-2de9-4c85-a843-35d4b14dd8e4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.160646 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c54792d1-2de9-4c85-a843-35d4b14dd8e4-kolla-config\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.262672 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.262738 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c54792d1-2de9-4c85-a843-35d4b14dd8e4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.262767 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c54792d1-2de9-4c85-a843-35d4b14dd8e4-kolla-config\") pod \"openstack-galera-0\" (UID: 
\"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.262817 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c54792d1-2de9-4c85-a843-35d4b14dd8e4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.262847 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5bwz\" (UniqueName: \"kubernetes.io/projected/c54792d1-2de9-4c85-a843-35d4b14dd8e4-kube-api-access-v5bwz\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.262919 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c54792d1-2de9-4c85-a843-35d4b14dd8e4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.262946 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c54792d1-2de9-4c85-a843-35d4b14dd8e4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.262977 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c54792d1-2de9-4c85-a843-35d4b14dd8e4-config-data-default\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.263117 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.263739 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c54792d1-2de9-4c85-a843-35d4b14dd8e4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.264960 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c54792d1-2de9-4c85-a843-35d4b14dd8e4-kolla-config\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.265149 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c54792d1-2de9-4c85-a843-35d4b14dd8e4-config-data-default\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.265415 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c54792d1-2de9-4c85-a843-35d4b14dd8e4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.271307 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c54792d1-2de9-4c85-a843-35d4b14dd8e4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.272264 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c54792d1-2de9-4c85-a843-35d4b14dd8e4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.295067 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5bwz\" (UniqueName: \"kubernetes.io/projected/c54792d1-2de9-4c85-a843-35d4b14dd8e4-kube-api-access-v5bwz\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.299250 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"c54792d1-2de9-4c85-a843-35d4b14dd8e4\") " pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.353277 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.853895 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e8488eab-54d1-405d-aa15-6f7f9a50b6a8","Type":"ContainerStarted","Data":"bd66e456cdd71de02c25ec5db9cb8eadd0c463a8ed9d3b95c47be585cdb60eba"} Jan 31 05:58:32 crc kubenswrapper[4712]: I0131 05:58:32.857633 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6bc2bf6-037a-4415-9e9a-fdae0ef54662","Type":"ContainerStarted","Data":"c6af7af08ffbb5d1a7ed07fe8656b63a292d2d6b9d3ac87793a15e24d3744857"} Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.003791 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 31 05:58:33 crc kubenswrapper[4712]: W0131 05:58:33.022614 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc54792d1_2de9_4c85_a843_35d4b14dd8e4.slice/crio-05484e3adf63e80196cfd139d4707a6662c26f1a1e45ad0a1a604ab00a1a1d8e WatchSource:0}: Error finding container 05484e3adf63e80196cfd139d4707a6662c26f1a1e45ad0a1a604ab00a1a1d8e: Status 404 returned error can't find the container with id 05484e3adf63e80196cfd139d4707a6662c26f1a1e45ad0a1a604ab00a1a1d8e Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.382325 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.384717 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.389243 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.389458 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.389600 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.396992 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-fhvzj" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.414374 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.525784 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9029816c-730b-4d38-9464-1d0ed936fc10-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.525843 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9029816c-730b-4d38-9464-1d0ed936fc10-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.525931 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9029816c-730b-4d38-9464-1d0ed936fc10-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.525956 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9029816c-730b-4d38-9464-1d0ed936fc10-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.525988 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf4fv\" (UniqueName: \"kubernetes.io/projected/9029816c-730b-4d38-9464-1d0ed936fc10-kube-api-access-tf4fv\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.526120 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9029816c-730b-4d38-9464-1d0ed936fc10-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.526153 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" 
(UniqueName: \"kubernetes.io/configmap/9029816c-730b-4d38-9464-1d0ed936fc10-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.526233 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.579335 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.580507 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.584723 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.584974 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-fp9k6" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.585159 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.607907 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631190 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631313 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5c874b7f-26e1-436e-9cdc-a440a86b72ec-kolla-config\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631358 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c874b7f-26e1-436e-9cdc-a440a86b72ec-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631377 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c874b7f-26e1-436e-9cdc-a440a86b72ec-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631448 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9029816c-730b-4d38-9464-1d0ed936fc10-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631470 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9029816c-730b-4d38-9464-1d0ed936fc10-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631509 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9029816c-730b-4d38-9464-1d0ed936fc10-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631525 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9029816c-730b-4d38-9464-1d0ed936fc10-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631547 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4rkm\" (UniqueName: \"kubernetes.io/projected/5c874b7f-26e1-436e-9cdc-a440a86b72ec-kube-api-access-f4rkm\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631583 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf4fv\" (UniqueName: \"kubernetes.io/projected/9029816c-730b-4d38-9464-1d0ed936fc10-kube-api-access-tf4fv\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631623 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c874b7f-26e1-436e-9cdc-a440a86b72ec-config-data\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631649 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9029816c-730b-4d38-9464-1d0ed936fc10-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631669 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9029816c-730b-4d38-9464-1d0ed936fc10-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.631745 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.636734 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/9029816c-730b-4d38-9464-1d0ed936fc10-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.643618 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9029816c-730b-4d38-9464-1d0ed936fc10-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.644554 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9029816c-730b-4d38-9464-1d0ed936fc10-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.644784 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9029816c-730b-4d38-9464-1d0ed936fc10-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.672386 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9029816c-730b-4d38-9464-1d0ed936fc10-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.676126 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.680159 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf4fv\" (UniqueName: \"kubernetes.io/projected/9029816c-730b-4d38-9464-1d0ed936fc10-kube-api-access-tf4fv\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.680518 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9029816c-730b-4d38-9464-1d0ed936fc10-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9029816c-730b-4d38-9464-1d0ed936fc10\") " pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.732849 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c874b7f-26e1-436e-9cdc-a440a86b72ec-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.732894 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c874b7f-26e1-436e-9cdc-a440a86b72ec-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " 
pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.732952 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4rkm\" (UniqueName: \"kubernetes.io/projected/5c874b7f-26e1-436e-9cdc-a440a86b72ec-kube-api-access-f4rkm\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.732991 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c874b7f-26e1-436e-9cdc-a440a86b72ec-config-data\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.733033 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5c874b7f-26e1-436e-9cdc-a440a86b72ec-kolla-config\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.736472 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5c874b7f-26e1-436e-9cdc-a440a86b72ec-kolla-config\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.737024 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c874b7f-26e1-436e-9cdc-a440a86b72ec-config-data\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.737124 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c874b7f-26e1-436e-9cdc-a440a86b72ec-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.737790 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c874b7f-26e1-436e-9cdc-a440a86b72ec-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.741800 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.766686 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4rkm\" (UniqueName: \"kubernetes.io/projected/5c874b7f-26e1-436e-9cdc-a440a86b72ec-kube-api-access-f4rkm\") pod \"memcached-0\" (UID: \"5c874b7f-26e1-436e-9cdc-a440a86b72ec\") " pod="openstack/memcached-0" Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.884088 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c54792d1-2de9-4c85-a843-35d4b14dd8e4","Type":"ContainerStarted","Data":"05484e3adf63e80196cfd139d4707a6662c26f1a1e45ad0a1a604ab00a1a1d8e"} Jan 31 05:58:33 crc kubenswrapper[4712]: I0131 05:58:33.911077 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Jan 31 05:58:34 crc kubenswrapper[4712]: I0131 05:58:34.565777 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 31 05:58:34 crc kubenswrapper[4712]: I0131 05:58:34.842604 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 31 05:58:34 crc kubenswrapper[4712]: W0131 05:58:34.842782 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c874b7f_26e1_436e_9cdc_a440a86b72ec.slice/crio-6cbbcc634c1c8703f9bf66d1a22215ffb4285d414dd20d9b45a342cb1c77d3c1 WatchSource:0}: Error finding container 6cbbcc634c1c8703f9bf66d1a22215ffb4285d414dd20d9b45a342cb1c77d3c1: Status 404 returned error can't find the container with id 6cbbcc634c1c8703f9bf66d1a22215ffb4285d414dd20d9b45a342cb1c77d3c1 Jan 31 05:58:34 crc kubenswrapper[4712]: I0131 05:58:34.910778 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9029816c-730b-4d38-9464-1d0ed936fc10","Type":"ContainerStarted","Data":"f9984b8a503b64ee72320400f498e3c4f32d98844065600cb8a8e26aa0af2677"} Jan 31 05:58:34 crc kubenswrapper[4712]: I0131 05:58:34.916203 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5c874b7f-26e1-436e-9cdc-a440a86b72ec","Type":"ContainerStarted","Data":"6cbbcc634c1c8703f9bf66d1a22215ffb4285d414dd20d9b45a342cb1c77d3c1"} Jan 31 05:58:35 crc kubenswrapper[4712]: I0131 05:58:35.664356 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 31 05:58:35 crc kubenswrapper[4712]: I0131 05:58:35.665809 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 31 05:58:35 crc kubenswrapper[4712]: I0131 05:58:35.671093 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 31 05:58:35 crc kubenswrapper[4712]: I0131 05:58:35.671684 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-xljq8" Jan 31 05:58:35 crc kubenswrapper[4712]: I0131 05:58:35.684229 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmngt\" (UniqueName: \"kubernetes.io/projected/72eaf48a-98d5-44bb-abec-a88630d51ae7-kube-api-access-xmngt\") pod \"kube-state-metrics-0\" (UID: \"72eaf48a-98d5-44bb-abec-a88630d51ae7\") " pod="openstack/kube-state-metrics-0" Jan 31 05:58:35 crc kubenswrapper[4712]: I0131 05:58:35.786710 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmngt\" (UniqueName: \"kubernetes.io/projected/72eaf48a-98d5-44bb-abec-a88630d51ae7-kube-api-access-xmngt\") pod \"kube-state-metrics-0\" (UID: \"72eaf48a-98d5-44bb-abec-a88630d51ae7\") " pod="openstack/kube-state-metrics-0" Jan 31 05:58:35 crc kubenswrapper[4712]: I0131 05:58:35.824088 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmngt\" (UniqueName: \"kubernetes.io/projected/72eaf48a-98d5-44bb-abec-a88630d51ae7-kube-api-access-xmngt\") pod \"kube-state-metrics-0\" (UID: \"72eaf48a-98d5-44bb-abec-a88630d51ae7\") " pod="openstack/kube-state-metrics-0" Jan 31 05:58:36 crc kubenswrapper[4712]: I0131 05:58:36.020019 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 31 05:58:36 crc kubenswrapper[4712]: I0131 05:58:36.652868 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 31 05:58:36 crc kubenswrapper[4712]: W0131 05:58:36.660264 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72eaf48a_98d5_44bb_abec_a88630d51ae7.slice/crio-72e6ba18426843f8ba66619394823844967b404a8baca8f951a69dd8b6e383bd WatchSource:0}: Error finding container 72e6ba18426843f8ba66619394823844967b404a8baca8f951a69dd8b6e383bd: Status 404 returned error can't find the container with id 72e6ba18426843f8ba66619394823844967b404a8baca8f951a69dd8b6e383bd Jan 31 05:58:36 crc kubenswrapper[4712]: I0131 05:58:36.951488 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"72eaf48a-98d5-44bb-abec-a88630d51ae7","Type":"ContainerStarted","Data":"72e6ba18426843f8ba66619394823844967b404a8baca8f951a69dd8b6e383bd"} Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.269992 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-xbh95"] Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.271734 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.275433 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vcvwf" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.275580 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.275783 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.289328 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-xzbjj"] Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.295014 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-xzbjj"] Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.295210 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.300739 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-xbh95"] Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372342 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-combined-ca-bundle\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372396 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-var-log-ovn\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372434 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-var-run\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372472 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-var-run-ovn\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372503 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-var-run\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372522 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-var-log\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372539 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/222305d6-dde8-43bd-801c-7420d0a05add-scripts\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372557 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsbtc\" (UniqueName: \"kubernetes.io/projected/222305d6-dde8-43bd-801c-7420d0a05add-kube-api-access-bsbtc\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372585 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: 
\"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-etc-ovs\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372606 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-scripts\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372626 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-var-lib\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372648 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxsp4\" (UniqueName: \"kubernetes.io/projected/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-kube-api-access-zxsp4\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.372702 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-ovn-controller-tls-certs\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.473890 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsbtc\" (UniqueName: \"kubernetes.io/projected/222305d6-dde8-43bd-801c-7420d0a05add-kube-api-access-bsbtc\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.473969 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-etc-ovs\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474014 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-scripts\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474040 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-var-lib\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474071 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxsp4\" (UniqueName: \"kubernetes.io/projected/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-kube-api-access-zxsp4\") pod 
\"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474116 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-ovn-controller-tls-certs\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474160 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-combined-ca-bundle\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474211 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-var-log-ovn\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474258 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-var-run\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474303 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-var-run-ovn\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474329 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-var-run\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474347 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-var-log\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.474368 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/222305d6-dde8-43bd-801c-7420d0a05add-scripts\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.475775 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-etc-ovs\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.476002 4712 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-var-log-ovn\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.476151 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-var-run\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.476219 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-var-run\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.476275 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-var-log\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.476234 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-var-run-ovn\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.476359 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/222305d6-dde8-43bd-801c-7420d0a05add-var-lib\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.479635 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-scripts\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.491851 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-ovn-controller-tls-certs\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.498458 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsbtc\" (UniqueName: \"kubernetes.io/projected/222305d6-dde8-43bd-801c-7420d0a05add-kube-api-access-bsbtc\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.501774 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxsp4\" (UniqueName: \"kubernetes.io/projected/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-kube-api-access-zxsp4\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc 
kubenswrapper[4712]: I0131 05:58:39.522202 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fc3ee9-9ecc-45b7-8410-d9a6b2da5863-combined-ca-bundle\") pod \"ovn-controller-xbh95\" (UID: \"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863\") " pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.559438 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/222305d6-dde8-43bd-801c-7420d0a05add-scripts\") pod \"ovn-controller-ovs-xzbjj\" (UID: \"222305d6-dde8-43bd-801c-7420d0a05add\") " pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.622164 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-xbh95" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:39.644010 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.084911 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.086675 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.089579 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.089801 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.090076 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-2t9xx" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.090225 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.090979 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.094067 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.213430 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql26w\" (UniqueName: \"kubernetes.io/projected/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-kube-api-access-ql26w\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.213687 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.213992 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " 
pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.214067 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.214208 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.214231 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.214492 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.214559 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-config\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.316709 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.316758 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.316863 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.316892 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-config\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.316917 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql26w\" 
(UniqueName: \"kubernetes.io/projected/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-kube-api-access-ql26w\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.316947 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.316989 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.317014 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.318893 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.319123 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-config\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.319665 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.320214 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.324968 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.328861 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.333114 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.344164 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.347572 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql26w\" (UniqueName: \"kubernetes.io/projected/58cd6591-0ba3-4102-b8de-79b3c7d77f8e-kube-api-access-ql26w\") pod \"ovsdbserver-sb-0\" (UID: \"58cd6591-0ba3-4102-b8de-79b3c7d77f8e\") " pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.416337 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 31 05:58:40 crc kubenswrapper[4712]: I0131 05:58:40.807502 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-xbh95"] Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.110027 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-xzbjj"] Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.521913 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-wxrfr"] Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.528568 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.536550 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.545052 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/cdd7091c-c446-44de-a591-89bcd4901347-ovs-rundir\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.545104 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd7091c-c446-44de-a591-89bcd4901347-config\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.545228 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/cdd7091c-c446-44de-a591-89bcd4901347-ovn-rundir\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.545269 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdd7091c-c446-44de-a591-89bcd4901347-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-wxrfr\" (UID: 
\"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.545367 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2msv\" (UniqueName: \"kubernetes.io/projected/cdd7091c-c446-44de-a591-89bcd4901347-kube-api-access-p2msv\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.545428 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdd7091c-c446-44de-a591-89bcd4901347-combined-ca-bundle\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.568856 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-wxrfr"] Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.648604 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/cdd7091c-c446-44de-a591-89bcd4901347-ovn-rundir\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.648666 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdd7091c-c446-44de-a591-89bcd4901347-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.648726 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2msv\" (UniqueName: \"kubernetes.io/projected/cdd7091c-c446-44de-a591-89bcd4901347-kube-api-access-p2msv\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.648759 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdd7091c-c446-44de-a591-89bcd4901347-combined-ca-bundle\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.648790 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/cdd7091c-c446-44de-a591-89bcd4901347-ovs-rundir\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.648816 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd7091c-c446-44de-a591-89bcd4901347-config\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.649140 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/cdd7091c-c446-44de-a591-89bcd4901347-ovn-rundir\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.649690 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd7091c-c446-44de-a591-89bcd4901347-config\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.649890 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/cdd7091c-c446-44de-a591-89bcd4901347-ovs-rundir\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.657950 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdd7091c-c446-44de-a591-89bcd4901347-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.665870 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdd7091c-c446-44de-a591-89bcd4901347-combined-ca-bundle\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.670779 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2msv\" (UniqueName: \"kubernetes.io/projected/cdd7091c-c446-44de-a591-89bcd4901347-kube-api-access-p2msv\") pod \"ovn-controller-metrics-wxrfr\" (UID: \"cdd7091c-c446-44de-a591-89bcd4901347\") " pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:41 crc kubenswrapper[4712]: I0131 05:58:41.876148 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-wxrfr" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.638622 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.640850 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.647671 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.647957 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-jnm2q" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.648591 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.648709 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.679087 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.791257 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eb28efb-abfd-4570-8282-f0189e523fa3-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.791362 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3eb28efb-abfd-4570-8282-f0189e523fa3-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.791395 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eb28efb-abfd-4570-8282-f0189e523fa3-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.791421 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eb28efb-abfd-4570-8282-f0189e523fa3-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.791455 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q965\" (UniqueName: \"kubernetes.io/projected/3eb28efb-abfd-4570-8282-f0189e523fa3-kube-api-access-5q965\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.791557 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3eb28efb-abfd-4570-8282-f0189e523fa3-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.791722 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3eb28efb-abfd-4570-8282-f0189e523fa3-config\") pod \"ovsdbserver-nb-0\" (UID: 
\"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.791760 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.893905 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3eb28efb-abfd-4570-8282-f0189e523fa3-config\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.894042 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.894213 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eb28efb-abfd-4570-8282-f0189e523fa3-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.894396 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3eb28efb-abfd-4570-8282-f0189e523fa3-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.894463 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eb28efb-abfd-4570-8282-f0189e523fa3-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.894519 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eb28efb-abfd-4570-8282-f0189e523fa3-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.894561 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q965\" (UniqueName: \"kubernetes.io/projected/3eb28efb-abfd-4570-8282-f0189e523fa3-kube-api-access-5q965\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.894622 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3eb28efb-abfd-4570-8282-f0189e523fa3-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.894782 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3eb28efb-abfd-4570-8282-f0189e523fa3-config\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.894577 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.895856 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3eb28efb-abfd-4570-8282-f0189e523fa3-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.896249 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3eb28efb-abfd-4570-8282-f0189e523fa3-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.898973 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eb28efb-abfd-4570-8282-f0189e523fa3-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.900042 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eb28efb-abfd-4570-8282-f0189e523fa3-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.915148 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q965\" (UniqueName: \"kubernetes.io/projected/3eb28efb-abfd-4570-8282-f0189e523fa3-kube-api-access-5q965\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.926487 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eb28efb-abfd-4570-8282-f0189e523fa3-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.941486 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3eb28efb-abfd-4570-8282-f0189e523fa3\") " pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:42 crc kubenswrapper[4712]: I0131 05:58:42.979211 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 31 05:58:45 crc kubenswrapper[4712]: W0131 05:58:45.397363 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod222305d6_dde8_43bd_801c_7420d0a05add.slice/crio-0359fd7d228bf0c51cb169b026c9ba59ac20e5994caaf9d6c1aea510daf7f6d8 WatchSource:0}: Error finding container 0359fd7d228bf0c51cb169b026c9ba59ac20e5994caaf9d6c1aea510daf7f6d8: Status 404 returned error can't find the container with id 0359fd7d228bf0c51cb169b026c9ba59ac20e5994caaf9d6c1aea510daf7f6d8 Jan 31 05:58:46 crc kubenswrapper[4712]: I0131 05:58:46.130044 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-xzbjj" event={"ID":"222305d6-dde8-43bd-801c-7420d0a05add","Type":"ContainerStarted","Data":"0359fd7d228bf0c51cb169b026c9ba59ac20e5994caaf9d6c1aea510daf7f6d8"} Jan 31 05:58:46 crc kubenswrapper[4712]: I0131 05:58:46.131909 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-xbh95" event={"ID":"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863","Type":"ContainerStarted","Data":"e8cd495696d56ef5e718edf17a2c63a1ea233941005d91fce393a120967b94c0"} Jan 31 05:58:58 crc kubenswrapper[4712]: E0131 05:58:58.282147 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-mariadb:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:58:58 crc kubenswrapper[4712]: E0131 05:58:58.282954 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-mariadb:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:58:58 crc kubenswrapper[4712]: E0131 05:58:58.283113 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.rdoproject.org/podified-master-centos9/openstack-mariadb:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v5bwz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(c54792d1-2de9-4c85-a843-35d4b14dd8e4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:58:58 crc kubenswrapper[4712]: E0131 05:58:58.284249 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="c54792d1-2de9-4c85-a843-35d4b14dd8e4" Jan 31 05:58:59 crc kubenswrapper[4712]: E0131 05:58:59.099116 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-memcached:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:58:59 crc kubenswrapper[4712]: E0131 05:58:59.099205 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-memcached:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:58:59 crc kubenswrapper[4712]: E0131 05:58:59.099384 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.rdoproject.org/podified-master-centos9/openstack-memcached:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/usr/bin/dumb-init -- 
/usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n656h665h65dh57h69h55dh5f7h74h59fh674h5h5b4hchbch54ch54bh5f6h5ffh67ch76h585h676h5bbh6bhfh58ch659h659h55dh598h5c9h57cq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f4rkm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(5c874b7f-26e1-436e-9cdc-a440a86b72ec): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:58:59 crc kubenswrapper[4712]: E0131 05:58:59.100629 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="5c874b7f-26e1-436e-9cdc-a440a86b72ec" Jan 31 05:58:59 crc kubenswrapper[4712]: E0131 05:58:59.275604 4712 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-mariadb:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/openstack-galera-0" podUID="c54792d1-2de9-4c85-a843-35d4b14dd8e4" Jan 31 05:58:59 crc kubenswrapper[4712]: E0131 05:58:59.276106 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-memcached:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/memcached-0" podUID="5c874b7f-26e1-436e-9cdc-a440a86b72ec" Jan 31 05:59:00 crc kubenswrapper[4712]: E0131 05:59:00.438474 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-ovn-base:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:00 crc kubenswrapper[4712]: E0131 05:59:00.438901 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-ovn-base:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:00 crc kubenswrapper[4712]: E0131 05:59:00.439039 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:ovsdb-server-init,Image:quay.rdoproject.org/podified-master-centos9/openstack-ovn-base:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/usr/local/bin/container-scripts/init-ovsdb-server.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n78h54fhffh588h547h5bh9ch596h569h588h8ch57ch67h55bh575hb7hcbh56bh6bh68dhbfh659h57dh59dh565h68dh645h5d9h8dh8h6bh669q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-ovs,ReadOnly:false,MountPath:/etc/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log,ReadOnly:false,MountPath:/var/log/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib,ReadOnly:false,MountPath:/var/lib/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bsbtc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ovn-controller-ovs-xzbjj_openstack(222305d6-dde8-43bd-801c-7420d0a05add): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:59:00 crc kubenswrapper[4712]: E0131 05:59:00.440631 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-ovs-xzbjj" podUID="222305d6-dde8-43bd-801c-7420d0a05add" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.289898 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-ovn-base:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/ovn-controller-ovs-xzbjj" podUID="222305d6-dde8-43bd-801c-7420d0a05add" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.334682 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.334740 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.334853 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ljw7n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7b7dbc5f8f-bm6z5_openstack(d87d95b3-0f23-4c0d-8d97-750ae9379acd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.336091 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" podUID="d87d95b3-0f23-4c0d-8d97-750ae9379acd" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.349332 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.349406 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.349552 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l9gvk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5d677d9b95-l5l5s_openstack(0bedfe15-450f-400f-8b12-0355d274a9c0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.350755 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" podUID="0bedfe15-450f-400f-8b12-0355d274a9c0" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.387306 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.387362 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.387484 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7fh8d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5679d46597-szdrh_openstack(cc3806d7-1bf1-411d-bfb3-b488b81963ca): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.389015 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5679d46597-szdrh" podUID="cc3806d7-1bf1-411d-bfb3-b488b81963ca" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.405889 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.405948 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.406064 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pnkql,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57958b85c7-pkl69_openstack(6deb14bc-bbe7-453b-9fb6-ec88ba84d463): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 05:59:01 crc kubenswrapper[4712]: E0131 05:59:01.407427 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57958b85c7-pkl69" podUID="6deb14bc-bbe7-453b-9fb6-ec88ba84d463" Jan 31 05:59:01 crc kubenswrapper[4712]: I0131 05:59:01.843688 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 31 05:59:01 crc kubenswrapper[4712]: I0131 05:59:01.938322 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 31 05:59:02 crc kubenswrapper[4712]: I0131 05:59:02.175664 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-wxrfr"] Jan 31 05:59:02 crc kubenswrapper[4712]: E0131 05:59:02.302756 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/dnsmasq-dns-57958b85c7-pkl69" podUID="6deb14bc-bbe7-453b-9fb6-ec88ba84d463" Jan 31 05:59:02 crc kubenswrapper[4712]: E0131 05:59:02.303324 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-neutron-server:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" podUID="0bedfe15-450f-400f-8b12-0355d274a9c0" Jan 
31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.075040 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.082010 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.206393 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljw7n\" (UniqueName: \"kubernetes.io/projected/d87d95b3-0f23-4c0d-8d97-750ae9379acd-kube-api-access-ljw7n\") pod \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.206529 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3806d7-1bf1-411d-bfb3-b488b81963ca-config\") pod \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\" (UID: \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\") " Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.206614 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-dns-svc\") pod \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.206657 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fh8d\" (UniqueName: \"kubernetes.io/projected/cc3806d7-1bf1-411d-bfb3-b488b81963ca-kube-api-access-7fh8d\") pod \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\" (UID: \"cc3806d7-1bf1-411d-bfb3-b488b81963ca\") " Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.206813 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-config\") pod \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\" (UID: \"d87d95b3-0f23-4c0d-8d97-750ae9379acd\") " Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.207747 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-config" (OuterVolumeSpecName: "config") pod "d87d95b3-0f23-4c0d-8d97-750ae9379acd" (UID: "d87d95b3-0f23-4c0d-8d97-750ae9379acd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.208975 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d87d95b3-0f23-4c0d-8d97-750ae9379acd" (UID: "d87d95b3-0f23-4c0d-8d97-750ae9379acd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.209460 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc3806d7-1bf1-411d-bfb3-b488b81963ca-config" (OuterVolumeSpecName: "config") pod "cc3806d7-1bf1-411d-bfb3-b488b81963ca" (UID: "cc3806d7-1bf1-411d-bfb3-b488b81963ca"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.212507 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d87d95b3-0f23-4c0d-8d97-750ae9379acd-kube-api-access-ljw7n" (OuterVolumeSpecName: "kube-api-access-ljw7n") pod "d87d95b3-0f23-4c0d-8d97-750ae9379acd" (UID: "d87d95b3-0f23-4c0d-8d97-750ae9379acd"). InnerVolumeSpecName "kube-api-access-ljw7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.212813 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc3806d7-1bf1-411d-bfb3-b488b81963ca-kube-api-access-7fh8d" (OuterVolumeSpecName: "kube-api-access-7fh8d") pod "cc3806d7-1bf1-411d-bfb3-b488b81963ca" (UID: "cc3806d7-1bf1-411d-bfb3-b488b81963ca"). InnerVolumeSpecName "kube-api-access-7fh8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.308754 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3806d7-1bf1-411d-bfb3-b488b81963ca-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.308791 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.308804 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fh8d\" (UniqueName: \"kubernetes.io/projected/cc3806d7-1bf1-411d-bfb3-b488b81963ca-kube-api-access-7fh8d\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.308815 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d87d95b3-0f23-4c0d-8d97-750ae9379acd-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.308827 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljw7n\" (UniqueName: \"kubernetes.io/projected/d87d95b3-0f23-4c0d-8d97-750ae9379acd-kube-api-access-ljw7n\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.309700 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"58cd6591-0ba3-4102-b8de-79b3c7d77f8e","Type":"ContainerStarted","Data":"0d0466003b275a3897bdcd83323477d80d9567cc23a9f27a3064677269792d6f"} Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.311243 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" event={"ID":"d87d95b3-0f23-4c0d-8d97-750ae9379acd","Type":"ContainerDied","Data":"60d2def02ddcba8e5ec846576cfd166a78f7be780e8a9fc687a089abeb10d48c"} Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.311276 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.312897 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5679d46597-szdrh" Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.312937 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5679d46597-szdrh" event={"ID":"cc3806d7-1bf1-411d-bfb3-b488b81963ca","Type":"ContainerDied","Data":"2f2fa4f2bfc2a35f7bd4d5231ef9b58669b77cec07483e9a2fd8903333005137"} Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.321074 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-wxrfr" event={"ID":"cdd7091c-c446-44de-a591-89bcd4901347","Type":"ContainerStarted","Data":"1508c9bcd26fdc640d1ae3e0f3b38bce9c014462a4336226a2da3908517cd5e7"} Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.322482 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3eb28efb-abfd-4570-8282-f0189e523fa3","Type":"ContainerStarted","Data":"6a3e91ed5a8c45838eec28e7b62805332ac54ef9576805e8adf05d70659365f0"} Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.376490 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5"] Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.383089 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b7dbc5f8f-bm6z5"] Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.430042 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5679d46597-szdrh"] Jan 31 05:59:03 crc kubenswrapper[4712]: I0131 05:59:03.452130 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5679d46597-szdrh"] Jan 31 05:59:04 crc kubenswrapper[4712]: I0131 05:59:04.331114 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6bc2bf6-037a-4415-9e9a-fdae0ef54662","Type":"ContainerStarted","Data":"cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7"} Jan 31 05:59:04 crc kubenswrapper[4712]: I0131 05:59:04.518057 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc3806d7-1bf1-411d-bfb3-b488b81963ca" path="/var/lib/kubelet/pods/cc3806d7-1bf1-411d-bfb3-b488b81963ca/volumes" Jan 31 05:59:04 crc kubenswrapper[4712]: I0131 05:59:04.518721 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d87d95b3-0f23-4c0d-8d97-750ae9379acd" path="/var/lib/kubelet/pods/d87d95b3-0f23-4c0d-8d97-750ae9379acd/volumes" Jan 31 05:59:05 crc kubenswrapper[4712]: I0131 05:59:05.343969 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9029816c-730b-4d38-9464-1d0ed936fc10","Type":"ContainerStarted","Data":"7f98be5a00acc9b7d515d285781a3c6d89ae123d83085761e08a8581604d9a4c"} Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.354887 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-wxrfr" event={"ID":"cdd7091c-c446-44de-a591-89bcd4901347","Type":"ContainerStarted","Data":"93726d39d3cadcc0c06f2b888599bd7ef57c07ce95930cebaa4903538397f6b6"} Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.357845 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3eb28efb-abfd-4570-8282-f0189e523fa3","Type":"ContainerStarted","Data":"cb8e2b23ee6389deba1d44040a05348e1cf71333d5b34c3d8fd16b877c0e5309"} Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.358242 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"3eb28efb-abfd-4570-8282-f0189e523fa3","Type":"ContainerStarted","Data":"24af6640388e26935876a3172a66c1e721abb0035d8a965ee416c750736c1989"} Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.359886 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"72eaf48a-98d5-44bb-abec-a88630d51ae7","Type":"ContainerStarted","Data":"506652cfe05af02cd801d6b0c1b91751bb6d62ea865452355ba91f1abcb256cf"} Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.360034 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.361409 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-xbh95" event={"ID":"16fc3ee9-9ecc-45b7-8410-d9a6b2da5863","Type":"ContainerStarted","Data":"9690f7b9ce99effaac96b89732c2725d5fd33f21cbb739a076e7e16ce34b5c9d"} Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.361589 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-xbh95" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.362914 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"58cd6591-0ba3-4102-b8de-79b3c7d77f8e","Type":"ContainerStarted","Data":"422d784cb667f35f6240aa7f7f76292ce10ea937f6db2dc04a17b6f26174ac41"} Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.362949 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"58cd6591-0ba3-4102-b8de-79b3c7d77f8e","Type":"ContainerStarted","Data":"20cbf007c9401f45d59d2b0412feef0fdea461c8cd4a31af5d9b00a1bdcebbd7"} Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.388378 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-wxrfr" podStartSLOduration=22.568622288 podStartE2EDuration="25.388331477s" podCreationTimestamp="2026-01-31 05:58:41 +0000 UTC" firstStartedPulling="2026-01-31 05:59:02.395883388 +0000 UTC m=+1208.489765229" lastFinishedPulling="2026-01-31 05:59:05.215592577 +0000 UTC m=+1211.309474418" observedRunningTime="2026-01-31 05:59:06.374239629 +0000 UTC m=+1212.468121480" watchObservedRunningTime="2026-01-31 05:59:06.388331477 +0000 UTC m=+1212.482213328" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.416097 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=24.588994892 podStartE2EDuration="27.416071813s" podCreationTimestamp="2026-01-31 05:58:39 +0000 UTC" firstStartedPulling="2026-01-31 05:59:02.387421999 +0000 UTC m=+1208.481303840" lastFinishedPulling="2026-01-31 05:59:05.21449892 +0000 UTC m=+1211.308380761" observedRunningTime="2026-01-31 05:59:06.407599514 +0000 UTC m=+1212.501481365" watchObservedRunningTime="2026-01-31 05:59:06.416071813 +0000 UTC m=+1212.509953674" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.452780 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.9159897 podStartE2EDuration="31.4527582s" podCreationTimestamp="2026-01-31 05:58:35 +0000 UTC" firstStartedPulling="2026-01-31 05:58:36.665911527 +0000 UTC m=+1182.759793358" lastFinishedPulling="2026-01-31 05:59:05.202680017 +0000 UTC m=+1211.296561858" observedRunningTime="2026-01-31 05:59:06.445305206 +0000 UTC m=+1212.539187057" watchObservedRunningTime="2026-01-31 05:59:06.4527582 +0000 UTC 
m=+1212.546640051" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.471933 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=22.667023632 podStartE2EDuration="25.471916275s" podCreationTimestamp="2026-01-31 05:58:41 +0000 UTC" firstStartedPulling="2026-01-31 05:59:02.395526439 +0000 UTC m=+1208.489408280" lastFinishedPulling="2026-01-31 05:59:05.200419062 +0000 UTC m=+1211.294300923" observedRunningTime="2026-01-31 05:59:06.46365063 +0000 UTC m=+1212.557532481" watchObservedRunningTime="2026-01-31 05:59:06.471916275 +0000 UTC m=+1212.565798106" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.665554 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-xbh95" podStartSLOduration=10.04699389 podStartE2EDuration="27.665538182s" podCreationTimestamp="2026-01-31 05:58:39 +0000 UTC" firstStartedPulling="2026-01-31 05:58:45.399769788 +0000 UTC m=+1191.493651629" lastFinishedPulling="2026-01-31 05:59:03.01831408 +0000 UTC m=+1209.112195921" observedRunningTime="2026-01-31 05:59:06.491682863 +0000 UTC m=+1212.585564724" watchObservedRunningTime="2026-01-31 05:59:06.665538182 +0000 UTC m=+1212.759420023" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.670115 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57958b85c7-pkl69"] Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.724256 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-dccf88c6c-7jzjk"] Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.725753 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.740476 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.747277 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dccf88c6c-7jzjk"] Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.808246 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvr8p\" (UniqueName: \"kubernetes.io/projected/88bf6a45-3163-4035-acba-cff178fb1c61-kube-api-access-jvr8p\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.808305 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-dns-svc\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.808349 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-config\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.808386 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-ovsdbserver-sb\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.910458 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-dns-svc\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.910506 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvr8p\" (UniqueName: \"kubernetes.io/projected/88bf6a45-3163-4035-acba-cff178fb1c61-kube-api-access-jvr8p\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.910549 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-config\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.910587 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-ovsdbserver-sb\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.911591 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-ovsdbserver-sb\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.911595 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-dns-svc\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.912260 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-config\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.931143 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvr8p\" (UniqueName: \"kubernetes.io/projected/88bf6a45-3163-4035-acba-cff178fb1c61-kube-api-access-jvr8p\") pod \"dnsmasq-dns-dccf88c6c-7jzjk\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.972049 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d677d9b95-l5l5s"] Jan 31 05:59:06 crc kubenswrapper[4712]: I0131 05:59:06.982283 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/ovsdbserver-nb-0" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.010899 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b8496b6f5-h889x"] Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.012289 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.017091 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.042354 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b8496b6f5-h889x"] Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.090657 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.115645 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-config\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.115702 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-dns-svc\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.115822 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hpf5\" (UniqueName: \"kubernetes.io/projected/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-kube-api-access-6hpf5\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.116016 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-nb\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.116084 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-sb\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.211385 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.218159 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hpf5\" (UniqueName: \"kubernetes.io/projected/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-kube-api-access-6hpf5\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.218246 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-nb\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.218287 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-sb\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.218402 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-config\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.218435 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-dns-svc\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.219945 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-dns-svc\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.220859 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-config\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.221548 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-nb\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.226564 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-sb\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 
05:59:07.243214 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hpf5\" (UniqueName: \"kubernetes.io/projected/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-kube-api-access-6hpf5\") pod \"dnsmasq-dns-5b8496b6f5-h889x\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.320242 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-config\") pod \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.320823 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-dns-svc\") pod \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.320721 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-config" (OuterVolumeSpecName: "config") pod "6deb14bc-bbe7-453b-9fb6-ec88ba84d463" (UID: "6deb14bc-bbe7-453b-9fb6-ec88ba84d463"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.321129 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnkql\" (UniqueName: \"kubernetes.io/projected/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-kube-api-access-pnkql\") pod \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\" (UID: \"6deb14bc-bbe7-453b-9fb6-ec88ba84d463\") " Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.321373 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6deb14bc-bbe7-453b-9fb6-ec88ba84d463" (UID: "6deb14bc-bbe7-453b-9fb6-ec88ba84d463"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.322075 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.322104 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.324222 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-kube-api-access-pnkql" (OuterVolumeSpecName: "kube-api-access-pnkql") pod "6deb14bc-bbe7-453b-9fb6-ec88ba84d463" (UID: "6deb14bc-bbe7-453b-9fb6-ec88ba84d463"). InnerVolumeSpecName "kube-api-access-pnkql". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.331051 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.338098 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.381460 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e8488eab-54d1-405d-aa15-6f7f9a50b6a8","Type":"ContainerStarted","Data":"01388726fb02d90b2eb560005e90cf645b9e2dd51d9efcd0b3815fa84335cf2c"} Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.384788 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57958b85c7-pkl69" event={"ID":"6deb14bc-bbe7-453b-9fb6-ec88ba84d463","Type":"ContainerDied","Data":"b4807d66b3ba62ac05ca0ed3570792d487a5c9959bc146c75a0a4860858533d2"} Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.384907 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57958b85c7-pkl69" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.389298 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.389332 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d677d9b95-l5l5s" event={"ID":"0bedfe15-450f-400f-8b12-0355d274a9c0","Type":"ContainerDied","Data":"ecb77a7620f434f0b39bafd3b6e1f696c76cd3319d10078e1073461666c09887"} Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.422568 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.423339 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9gvk\" (UniqueName: \"kubernetes.io/projected/0bedfe15-450f-400f-8b12-0355d274a9c0-kube-api-access-l9gvk\") pod \"0bedfe15-450f-400f-8b12-0355d274a9c0\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.423431 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-config\") pod \"0bedfe15-450f-400f-8b12-0355d274a9c0\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.423460 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-dns-svc\") pod \"0bedfe15-450f-400f-8b12-0355d274a9c0\" (UID: \"0bedfe15-450f-400f-8b12-0355d274a9c0\") " Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.424244 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnkql\" (UniqueName: \"kubernetes.io/projected/6deb14bc-bbe7-453b-9fb6-ec88ba84d463-kube-api-access-pnkql\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.425803 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-config" (OuterVolumeSpecName: "config") pod "0bedfe15-450f-400f-8b12-0355d274a9c0" (UID: "0bedfe15-450f-400f-8b12-0355d274a9c0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.425887 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0bedfe15-450f-400f-8b12-0355d274a9c0" (UID: "0bedfe15-450f-400f-8b12-0355d274a9c0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.427687 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bedfe15-450f-400f-8b12-0355d274a9c0-kube-api-access-l9gvk" (OuterVolumeSpecName: "kube-api-access-l9gvk") pod "0bedfe15-450f-400f-8b12-0355d274a9c0" (UID: "0bedfe15-450f-400f-8b12-0355d274a9c0"). InnerVolumeSpecName "kube-api-access-l9gvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.477820 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57958b85c7-pkl69"] Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.482773 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57958b85c7-pkl69"] Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.525982 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9gvk\" (UniqueName: \"kubernetes.io/projected/0bedfe15-450f-400f-8b12-0355d274a9c0-kube-api-access-l9gvk\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.526025 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.526034 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0bedfe15-450f-400f-8b12-0355d274a9c0-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.606750 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dccf88c6c-7jzjk"] Jan 31 05:59:07 crc kubenswrapper[4712]: W0131 05:59:07.618443 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88bf6a45_3163_4035_acba_cff178fb1c61.slice/crio-8d826e4dba825c1027487b8118de6742ff98fd3270392b029722ab7a5c3878eb WatchSource:0}: Error finding container 8d826e4dba825c1027487b8118de6742ff98fd3270392b029722ab7a5c3878eb: Status 404 returned error can't find the container with id 8d826e4dba825c1027487b8118de6742ff98fd3270392b029722ab7a5c3878eb Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.620520 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b8496b6f5-h889x"] Jan 31 05:59:07 crc kubenswrapper[4712]: W0131 05:59:07.623148 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa6ae8de_5532_4ddf_bc62_92c38aa7bfa0.slice/crio-32e2109da768813f3e2198932468e90f349e339df05440990c735e2b9ab2e673 WatchSource:0}: Error finding container 32e2109da768813f3e2198932468e90f349e339df05440990c735e2b9ab2e673: Status 404 returned error can't find the container with id 32e2109da768813f3e2198932468e90f349e339df05440990c735e2b9ab2e673 Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.756344 4712 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d677d9b95-l5l5s"] Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.761848 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d677d9b95-l5l5s"] Jan 31 05:59:07 crc kubenswrapper[4712]: I0131 05:59:07.979660 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 31 05:59:08 crc kubenswrapper[4712]: I0131 05:59:08.400233 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" event={"ID":"88bf6a45-3163-4035-acba-cff178fb1c61","Type":"ContainerStarted","Data":"8d826e4dba825c1027487b8118de6742ff98fd3270392b029722ab7a5c3878eb"} Jan 31 05:59:08 crc kubenswrapper[4712]: I0131 05:59:08.402617 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" event={"ID":"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0","Type":"ContainerStarted","Data":"32e2109da768813f3e2198932468e90f349e339df05440990c735e2b9ab2e673"} Jan 31 05:59:08 crc kubenswrapper[4712]: I0131 05:59:08.523726 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bedfe15-450f-400f-8b12-0355d274a9c0" path="/var/lib/kubelet/pods/0bedfe15-450f-400f-8b12-0355d274a9c0/volumes" Jan 31 05:59:08 crc kubenswrapper[4712]: I0131 05:59:08.524317 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6deb14bc-bbe7-453b-9fb6-ec88ba84d463" path="/var/lib/kubelet/pods/6deb14bc-bbe7-453b-9fb6-ec88ba84d463/volumes" Jan 31 05:59:10 crc kubenswrapper[4712]: I0131 05:59:10.027063 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 31 05:59:10 crc kubenswrapper[4712]: I0131 05:59:10.421610 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jan 31 05:59:10 crc kubenswrapper[4712]: I0131 05:59:10.432032 4712 generic.go:334] "Generic (PLEG): container finished" podID="aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" containerID="1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef" exitCode=0 Jan 31 05:59:10 crc kubenswrapper[4712]: I0131 05:59:10.432111 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" event={"ID":"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0","Type":"ContainerDied","Data":"1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef"} Jan 31 05:59:10 crc kubenswrapper[4712]: I0131 05:59:10.433764 4712 generic.go:334] "Generic (PLEG): container finished" podID="88bf6a45-3163-4035-acba-cff178fb1c61" containerID="6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a" exitCode=0 Jan 31 05:59:10 crc kubenswrapper[4712]: I0131 05:59:10.434516 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" event={"ID":"88bf6a45-3163-4035-acba-cff178fb1c61","Type":"ContainerDied","Data":"6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a"} Jan 31 05:59:10 crc kubenswrapper[4712]: I0131 05:59:10.471152 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jan 31 05:59:10 crc kubenswrapper[4712]: I0131 05:59:10.501047 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.444814 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" 
event={"ID":"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0","Type":"ContainerStarted","Data":"90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af"} Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.445388 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.447170 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c54792d1-2de9-4c85-a843-35d4b14dd8e4","Type":"ContainerStarted","Data":"1e9be6e2b51fcbe82d3caea9f37cfb08c952629557c13c280a59f2e7a83a236a"} Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.449480 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" event={"ID":"88bf6a45-3163-4035-acba-cff178fb1c61","Type":"ContainerStarted","Data":"098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960"} Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.450089 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.472780 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" podStartSLOduration=3.184584656 podStartE2EDuration="5.472760929s" podCreationTimestamp="2026-01-31 05:59:06 +0000 UTC" firstStartedPulling="2026-01-31 05:59:07.625370699 +0000 UTC m=+1213.719252540" lastFinishedPulling="2026-01-31 05:59:09.913546962 +0000 UTC m=+1216.007428813" observedRunningTime="2026-01-31 05:59:11.465823559 +0000 UTC m=+1217.559705400" watchObservedRunningTime="2026-01-31 05:59:11.472760929 +0000 UTC m=+1217.566642770" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.503834 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.517009 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" podStartSLOduration=3.218937085 podStartE2EDuration="5.516982993s" podCreationTimestamp="2026-01-31 05:59:06 +0000 UTC" firstStartedPulling="2026-01-31 05:59:07.620662672 +0000 UTC m=+1213.714544513" lastFinishedPulling="2026-01-31 05:59:09.91870854 +0000 UTC m=+1216.012590421" observedRunningTime="2026-01-31 05:59:11.510205785 +0000 UTC m=+1217.604087626" watchObservedRunningTime="2026-01-31 05:59:11.516982993 +0000 UTC m=+1217.610864834" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.681699 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.689874 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.693247 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.693573 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-d22nt" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.693701 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.707031 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.710455 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.816233 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2971f42d-daa7-474b-8d5e-06f5d943d091-scripts\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.816354 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/2971f42d-daa7-474b-8d5e-06f5d943d091-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.816375 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2971f42d-daa7-474b-8d5e-06f5d943d091-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.816397 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2971f42d-daa7-474b-8d5e-06f5d943d091-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.816427 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2971f42d-daa7-474b-8d5e-06f5d943d091-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.816582 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mck7g\" (UniqueName: \"kubernetes.io/projected/2971f42d-daa7-474b-8d5e-06f5d943d091-kube-api-access-mck7g\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.816624 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2971f42d-daa7-474b-8d5e-06f5d943d091-config\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: 
I0131 05:59:11.919522 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mck7g\" (UniqueName: \"kubernetes.io/projected/2971f42d-daa7-474b-8d5e-06f5d943d091-kube-api-access-mck7g\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.919615 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2971f42d-daa7-474b-8d5e-06f5d943d091-config\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.919718 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2971f42d-daa7-474b-8d5e-06f5d943d091-scripts\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.919820 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/2971f42d-daa7-474b-8d5e-06f5d943d091-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.919868 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2971f42d-daa7-474b-8d5e-06f5d943d091-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.919907 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2971f42d-daa7-474b-8d5e-06f5d943d091-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.919961 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2971f42d-daa7-474b-8d5e-06f5d943d091-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.920900 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2971f42d-daa7-474b-8d5e-06f5d943d091-config\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.921006 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2971f42d-daa7-474b-8d5e-06f5d943d091-scripts\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.921260 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2971f42d-daa7-474b-8d5e-06f5d943d091-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.925792 4712 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2971f42d-daa7-474b-8d5e-06f5d943d091-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.926350 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2971f42d-daa7-474b-8d5e-06f5d943d091-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.926931 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/2971f42d-daa7-474b-8d5e-06f5d943d091-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:11 crc kubenswrapper[4712]: I0131 05:59:11.937319 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mck7g\" (UniqueName: \"kubernetes.io/projected/2971f42d-daa7-474b-8d5e-06f5d943d091-kube-api-access-mck7g\") pod \"ovn-northd-0\" (UID: \"2971f42d-daa7-474b-8d5e-06f5d943d091\") " pod="openstack/ovn-northd-0" Jan 31 05:59:12 crc kubenswrapper[4712]: I0131 05:59:12.014872 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 31 05:59:12 crc kubenswrapper[4712]: I0131 05:59:12.497427 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 31 05:59:13 crc kubenswrapper[4712]: I0131 05:59:13.474134 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"2971f42d-daa7-474b-8d5e-06f5d943d091","Type":"ContainerStarted","Data":"81e3f19073c1f8e4ad89bbff0dff5a4187fd4e1fb35f562dd925e6e57c63bd8f"} Jan 31 05:59:15 crc kubenswrapper[4712]: I0131 05:59:15.491388 4712 generic.go:334] "Generic (PLEG): container finished" podID="9029816c-730b-4d38-9464-1d0ed936fc10" containerID="7f98be5a00acc9b7d515d285781a3c6d89ae123d83085761e08a8581604d9a4c" exitCode=0 Jan 31 05:59:15 crc kubenswrapper[4712]: I0131 05:59:15.491705 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9029816c-730b-4d38-9464-1d0ed936fc10","Type":"ContainerDied","Data":"7f98be5a00acc9b7d515d285781a3c6d89ae123d83085761e08a8581604d9a4c"} Jan 31 05:59:16 crc kubenswrapper[4712]: I0131 05:59:16.025722 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.093304 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.340508 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.403115 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dccf88c6c-7jzjk"] Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.507957 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"5c874b7f-26e1-436e-9cdc-a440a86b72ec","Type":"ContainerStarted","Data":"6058b3b302cae93cafdbfd787428e24c39e753ffa0212f7d5ba208c29e18bdcb"} Jan 31 
05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.508215 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.510877 4712 generic.go:334] "Generic (PLEG): container finished" podID="222305d6-dde8-43bd-801c-7420d0a05add" containerID="5c3e1b04afbe88890c55d460b68f4bda9bc5767a212eecb15e989aa2b1a34780" exitCode=0 Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.510915 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-xzbjj" event={"ID":"222305d6-dde8-43bd-801c-7420d0a05add","Type":"ContainerDied","Data":"5c3e1b04afbe88890c55d460b68f4bda9bc5767a212eecb15e989aa2b1a34780"} Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.513319 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9029816c-730b-4d38-9464-1d0ed936fc10","Type":"ContainerStarted","Data":"d2628b25b4706a0f6b2b709f47bad822898b3fa3bba9a4e816abc30f3e015a20"} Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.513441 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" podUID="88bf6a45-3163-4035-acba-cff178fb1c61" containerName="dnsmasq-dns" containerID="cri-o://098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960" gracePeriod=10 Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.542281 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.048692843 podStartE2EDuration="44.542254592s" podCreationTimestamp="2026-01-31 05:58:33 +0000 UTC" firstStartedPulling="2026-01-31 05:58:34.854906822 +0000 UTC m=+1180.948788663" lastFinishedPulling="2026-01-31 05:59:16.348468571 +0000 UTC m=+1222.442350412" observedRunningTime="2026-01-31 05:59:17.525248772 +0000 UTC m=+1223.619130613" watchObservedRunningTime="2026-01-31 05:59:17.542254592 +0000 UTC m=+1223.636136433" Jan 31 05:59:17 crc kubenswrapper[4712]: I0131 05:59:17.595735 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=18.467368973 podStartE2EDuration="45.595716665s" podCreationTimestamp="2026-01-31 05:58:32 +0000 UTC" firstStartedPulling="2026-01-31 05:58:34.592591675 +0000 UTC m=+1180.686473516" lastFinishedPulling="2026-01-31 05:59:01.720939367 +0000 UTC m=+1207.814821208" observedRunningTime="2026-01-31 05:59:17.586092407 +0000 UTC m=+1223.679974398" watchObservedRunningTime="2026-01-31 05:59:17.595716665 +0000 UTC m=+1223.689598506" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.519619 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.552426 4712 generic.go:334] "Generic (PLEG): container finished" podID="88bf6a45-3163-4035-acba-cff178fb1c61" containerID="098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960" exitCode=0 Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.552914 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" event={"ID":"88bf6a45-3163-4035-acba-cff178fb1c61","Type":"ContainerDied","Data":"098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960"} Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.552951 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" event={"ID":"88bf6a45-3163-4035-acba-cff178fb1c61","Type":"ContainerDied","Data":"8d826e4dba825c1027487b8118de6742ff98fd3270392b029722ab7a5c3878eb"} Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.552973 4712 scope.go:117] "RemoveContainer" containerID="098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.553166 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dccf88c6c-7jzjk" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.563154 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-dns-svc\") pod \"88bf6a45-3163-4035-acba-cff178fb1c61\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.563342 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-ovsdbserver-sb\") pod \"88bf6a45-3163-4035-acba-cff178fb1c61\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.563388 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-config\") pod \"88bf6a45-3163-4035-acba-cff178fb1c61\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.563511 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvr8p\" (UniqueName: \"kubernetes.io/projected/88bf6a45-3163-4035-acba-cff178fb1c61-kube-api-access-jvr8p\") pod \"88bf6a45-3163-4035-acba-cff178fb1c61\" (UID: \"88bf6a45-3163-4035-acba-cff178fb1c61\") " Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.579601 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"2971f42d-daa7-474b-8d5e-06f5d943d091","Type":"ContainerStarted","Data":"d533d325724406c26d25ceb85c36e1885a2e6262cde17977141dfc3b0070c780"} Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.579653 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"2971f42d-daa7-474b-8d5e-06f5d943d091","Type":"ContainerStarted","Data":"e10353813746a6c3ba704e8bf3c429b3847efa910aa91dd31706010e71d1b6a9"} Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.589982 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.612016 4712 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-xzbjj" event={"ID":"222305d6-dde8-43bd-801c-7420d0a05add","Type":"ContainerStarted","Data":"86dc9d2a32128a8883d6a74e531a911083016b1317b1e49f638d82df3a94a850"} Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.612057 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-xzbjj" event={"ID":"222305d6-dde8-43bd-801c-7420d0a05add","Type":"ContainerStarted","Data":"2c3471aaece4a06d7d269f261fdd89b86f4e18562c0be8b8187ee6e502074e46"} Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.612468 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.612501 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.633305 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.01633224 podStartE2EDuration="7.633281703s" podCreationTimestamp="2026-01-31 05:59:11 +0000 UTC" firstStartedPulling="2026-01-31 05:59:12.644425664 +0000 UTC m=+1218.738307505" lastFinishedPulling="2026-01-31 05:59:17.261375127 +0000 UTC m=+1223.355256968" observedRunningTime="2026-01-31 05:59:18.625973761 +0000 UTC m=+1224.719855602" watchObservedRunningTime="2026-01-31 05:59:18.633281703 +0000 UTC m=+1224.727163544" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.639657 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88bf6a45-3163-4035-acba-cff178fb1c61-kube-api-access-jvr8p" (OuterVolumeSpecName: "kube-api-access-jvr8p") pod "88bf6a45-3163-4035-acba-cff178fb1c61" (UID: "88bf6a45-3163-4035-acba-cff178fb1c61"). InnerVolumeSpecName "kube-api-access-jvr8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.670753 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvr8p\" (UniqueName: \"kubernetes.io/projected/88bf6a45-3163-4035-acba-cff178fb1c61-kube-api-access-jvr8p\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.676104 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "88bf6a45-3163-4035-acba-cff178fb1c61" (UID: "88bf6a45-3163-4035-acba-cff178fb1c61"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.705134 4712 scope.go:117] "RemoveContainer" containerID="6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.710569 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-xzbjj" podStartSLOduration=8.763396009000001 podStartE2EDuration="39.710545214s" podCreationTimestamp="2026-01-31 05:58:39 +0000 UTC" firstStartedPulling="2026-01-31 05:58:45.400559677 +0000 UTC m=+1191.494441518" lastFinishedPulling="2026-01-31 05:59:16.347708872 +0000 UTC m=+1222.441590723" observedRunningTime="2026-01-31 05:59:18.70558039 +0000 UTC m=+1224.799462231" watchObservedRunningTime="2026-01-31 05:59:18.710545214 +0000 UTC m=+1224.804427055" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.716662 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-config" (OuterVolumeSpecName: "config") pod "88bf6a45-3163-4035-acba-cff178fb1c61" (UID: "88bf6a45-3163-4035-acba-cff178fb1c61"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.720411 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "88bf6a45-3163-4035-acba-cff178fb1c61" (UID: "88bf6a45-3163-4035-acba-cff178fb1c61"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.736509 4712 scope.go:117] "RemoveContainer" containerID="098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960" Jan 31 05:59:18 crc kubenswrapper[4712]: E0131 05:59:18.737095 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960\": container with ID starting with 098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960 not found: ID does not exist" containerID="098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.737134 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960"} err="failed to get container status \"098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960\": rpc error: code = NotFound desc = could not find container \"098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960\": container with ID starting with 098a7bb414e34b2c3f06548164dfba52ca265258d9e5d13ee1034b9a521f7960 not found: ID does not exist" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.737166 4712 scope.go:117] "RemoveContainer" containerID="6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a" Jan 31 05:59:18 crc kubenswrapper[4712]: E0131 05:59:18.737479 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a\": container with ID starting with 6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a not found: ID does not exist" 
containerID="6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.737511 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a"} err="failed to get container status \"6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a\": rpc error: code = NotFound desc = could not find container \"6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a\": container with ID starting with 6bdfa7c337e0cfa045d2a3423a88a9cb01d4ed4c4fb1efeaab6763a87148421a not found: ID does not exist" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.774998 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.775065 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.775084 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88bf6a45-3163-4035-acba-cff178fb1c61-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.890316 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dccf88c6c-7jzjk"] Jan 31 05:59:18 crc kubenswrapper[4712]: I0131 05:59:18.897623 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-dccf88c6c-7jzjk"] Jan 31 05:59:20 crc kubenswrapper[4712]: I0131 05:59:20.527454 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88bf6a45-3163-4035-acba-cff178fb1c61" path="/var/lib/kubelet/pods/88bf6a45-3163-4035-acba-cff178fb1c61/volumes" Jan 31 05:59:23 crc kubenswrapper[4712]: I0131 05:59:23.742147 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 31 05:59:23 crc kubenswrapper[4712]: I0131 05:59:23.742546 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 31 05:59:23 crc kubenswrapper[4712]: I0131 05:59:23.913744 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.110697 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.184867 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.682518 4712 generic.go:334] "Generic (PLEG): container finished" podID="c54792d1-2de9-4c85-a843-35d4b14dd8e4" containerID="1e9be6e2b51fcbe82d3caea9f37cfb08c952629557c13c280a59f2e7a83a236a" exitCode=0 Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.683581 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c54792d1-2de9-4c85-a843-35d4b14dd8e4","Type":"ContainerDied","Data":"1e9be6e2b51fcbe82d3caea9f37cfb08c952629557c13c280a59f2e7a83a236a"} Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.962660 4712 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/dnsmasq-dns-665586dd7c-2w2wq"] Jan 31 05:59:25 crc kubenswrapper[4712]: E0131 05:59:25.964282 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88bf6a45-3163-4035-acba-cff178fb1c61" containerName="init" Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.964298 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="88bf6a45-3163-4035-acba-cff178fb1c61" containerName="init" Jan 31 05:59:25 crc kubenswrapper[4712]: E0131 05:59:25.965246 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88bf6a45-3163-4035-acba-cff178fb1c61" containerName="dnsmasq-dns" Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.965259 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="88bf6a45-3163-4035-acba-cff178fb1c61" containerName="dnsmasq-dns" Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.965856 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="88bf6a45-3163-4035-acba-cff178fb1c61" containerName="dnsmasq-dns" Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.967017 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:25 crc kubenswrapper[4712]: I0131 05:59:25.990694 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-665586dd7c-2w2wq"] Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.023314 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brn64\" (UniqueName: \"kubernetes.io/projected/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-kube-api-access-brn64\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.023440 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-sb\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.023491 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-config\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.023544 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-nb\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.023562 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-dns-svc\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.125768 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-sb\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.125880 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-config\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.125943 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-nb\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.125973 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-dns-svc\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.126020 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brn64\" (UniqueName: \"kubernetes.io/projected/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-kube-api-access-brn64\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.126961 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-nb\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.127014 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-config\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.127263 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-sb\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.127560 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-dns-svc\") pod \"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.145308 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brn64\" (UniqueName: \"kubernetes.io/projected/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-kube-api-access-brn64\") pod 
\"dnsmasq-dns-665586dd7c-2w2wq\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") " pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.298163 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.693899 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c54792d1-2de9-4c85-a843-35d4b14dd8e4","Type":"ContainerStarted","Data":"0ead9dc436a7465699e3ce0336b50270f6034f734cbd5c9a3788ee10adbddf18"} Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.723795 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371980.131 podStartE2EDuration="56.723775273s" podCreationTimestamp="2026-01-31 05:58:30 +0000 UTC" firstStartedPulling="2026-01-31 05:58:33.027136493 +0000 UTC m=+1179.121018334" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:59:26.715066278 +0000 UTC m=+1232.808948119" watchObservedRunningTime="2026-01-31 05:59:26.723775273 +0000 UTC m=+1232.817657114" Jan 31 05:59:26 crc kubenswrapper[4712]: I0131 05:59:26.806011 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-665586dd7c-2w2wq"] Jan 31 05:59:26 crc kubenswrapper[4712]: W0131 05:59:26.813330 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd72aff95_0c44_495c_8fa5_d4a4ef3aa765.slice/crio-2452862451a153a61b78baf4a3e68b382ff40b8a62a2eb3487fa3b1d7f85b2e3 WatchSource:0}: Error finding container 2452862451a153a61b78baf4a3e68b382ff40b8a62a2eb3487fa3b1d7f85b2e3: Status 404 returned error can't find the container with id 2452862451a153a61b78baf4a3e68b382ff40b8a62a2eb3487fa3b1d7f85b2e3 Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.095254 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.102605 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.110364 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.110408 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-ghtrg" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.110485 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.115277 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.118724 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.253364 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/526325aa-f517-45ab-b0d3-b7285ef8db7b-cache\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.253418 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxt7t\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-kube-api-access-jxt7t\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.253535 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.253585 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/526325aa-f517-45ab-b0d3-b7285ef8db7b-lock\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.253610 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.253866 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/526325aa-f517-45ab-b0d3-b7285ef8db7b-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.356361 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/526325aa-f517-45ab-b0d3-b7285ef8db7b-cache\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.356433 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-jxt7t\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-kube-api-access-jxt7t\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.356468 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.356519 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/526325aa-f517-45ab-b0d3-b7285ef8db7b-lock\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.356545 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.356613 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/526325aa-f517-45ab-b0d3-b7285ef8db7b-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: E0131 05:59:27.356695 4712 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 31 05:59:27 crc kubenswrapper[4712]: E0131 05:59:27.356728 4712 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 31 05:59:27 crc kubenswrapper[4712]: E0131 05:59:27.356801 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift podName:526325aa-f517-45ab-b0d3-b7285ef8db7b nodeName:}" failed. No retries permitted until 2026-01-31 05:59:27.856778787 +0000 UTC m=+1233.950660798 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift") pod "swift-storage-0" (UID: "526325aa-f517-45ab-b0d3-b7285ef8db7b") : configmap "swift-ring-files" not found Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.356937 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/526325aa-f517-45ab-b0d3-b7285ef8db7b-cache\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.356981 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/526325aa-f517-45ab-b0d3-b7285ef8db7b-lock\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.357020 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.363777 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/526325aa-f517-45ab-b0d3-b7285ef8db7b-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.383090 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxt7t\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-kube-api-access-jxt7t\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.396844 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.733352 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" event={"ID":"d72aff95-0c44-495c-8fa5-d4a4ef3aa765","Type":"ContainerDied","Data":"2c588e139bf82831700b59759d4bcbaeb33d22ce1f0a4e4f9ba6703420f314bd"} Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.733816 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-xnfzc"] Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.735407 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.735020 4712 generic.go:334] "Generic (PLEG): container finished" podID="d72aff95-0c44-495c-8fa5-d4a4ef3aa765" containerID="2c588e139bf82831700b59759d4bcbaeb33d22ce1f0a4e4f9ba6703420f314bd" exitCode=0 Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.737768 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" event={"ID":"d72aff95-0c44-495c-8fa5-d4a4ef3aa765","Type":"ContainerStarted","Data":"2452862451a153a61b78baf4a3e68b382ff40b8a62a2eb3487fa3b1d7f85b2e3"} Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.738095 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.738114 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.738443 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.755360 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-xnfzc"] Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.863795 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-dispersionconf\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.863891 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:27 crc kubenswrapper[4712]: E0131 05:59:27.864138 4712 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 31 05:59:27 crc kubenswrapper[4712]: E0131 05:59:27.864186 4712 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 31 05:59:27 crc kubenswrapper[4712]: E0131 05:59:27.864243 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift podName:526325aa-f517-45ab-b0d3-b7285ef8db7b nodeName:}" failed. No retries permitted until 2026-01-31 05:59:28.864224215 +0000 UTC m=+1234.958106056 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift") pod "swift-storage-0" (UID: "526325aa-f517-45ab-b0d3-b7285ef8db7b") : configmap "swift-ring-files" not found Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.864327 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-ring-data-devices\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.864423 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-scripts\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.864452 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-swiftconf\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.864533 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/108e0e4f-9137-4e8c-aec6-032c1585852c-etc-swift\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.864565 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkhvm\" (UniqueName: \"kubernetes.io/projected/108e0e4f-9137-4e8c-aec6-032c1585852c-kube-api-access-jkhvm\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.864697 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-combined-ca-bundle\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.966186 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-dispersionconf\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.966306 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-ring-data-devices\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.966386 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-scripts\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.966413 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-swiftconf\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.966441 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/108e0e4f-9137-4e8c-aec6-032c1585852c-etc-swift\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.966472 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkhvm\" (UniqueName: \"kubernetes.io/projected/108e0e4f-9137-4e8c-aec6-032c1585852c-kube-api-access-jkhvm\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.966518 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-combined-ca-bundle\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.967227 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-scripts\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.967285 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-ring-data-devices\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.967595 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/108e0e4f-9137-4e8c-aec6-032c1585852c-etc-swift\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.971527 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-swiftconf\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.971683 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-combined-ca-bundle\") pod 
\"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.972436 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-dispersionconf\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:27 crc kubenswrapper[4712]: I0131 05:59:27.991213 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkhvm\" (UniqueName: \"kubernetes.io/projected/108e0e4f-9137-4e8c-aec6-032c1585852c-kube-api-access-jkhvm\") pod \"swift-ring-rebalance-xnfzc\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:28 crc kubenswrapper[4712]: I0131 05:59:28.129065 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:28 crc kubenswrapper[4712]: W0131 05:59:28.392312 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod108e0e4f_9137_4e8c_aec6_032c1585852c.slice/crio-b5e77f8aa605c9cb597dbd051e8c8012967e335bd3c09eb4ca5c5b0cefbaeda2 WatchSource:0}: Error finding container b5e77f8aa605c9cb597dbd051e8c8012967e335bd3c09eb4ca5c5b0cefbaeda2: Status 404 returned error can't find the container with id b5e77f8aa605c9cb597dbd051e8c8012967e335bd3c09eb4ca5c5b0cefbaeda2 Jan 31 05:59:28 crc kubenswrapper[4712]: I0131 05:59:28.392534 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-xnfzc"] Jan 31 05:59:28 crc kubenswrapper[4712]: I0131 05:59:28.746916 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-xnfzc" event={"ID":"108e0e4f-9137-4e8c-aec6-032c1585852c","Type":"ContainerStarted","Data":"b5e77f8aa605c9cb597dbd051e8c8012967e335bd3c09eb4ca5c5b0cefbaeda2"} Jan 31 05:59:28 crc kubenswrapper[4712]: I0131 05:59:28.751258 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" event={"ID":"d72aff95-0c44-495c-8fa5-d4a4ef3aa765","Type":"ContainerStarted","Data":"a3e0aae95726a49c7292184cf8f574f430ed94a4b288cbb06cb8060e4b5762e3"} Jan 31 05:59:28 crc kubenswrapper[4712]: I0131 05:59:28.752938 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:28 crc kubenswrapper[4712]: I0131 05:59:28.772293 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" podStartSLOduration=3.772267871 podStartE2EDuration="3.772267871s" podCreationTimestamp="2026-01-31 05:59:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:59:28.768503947 +0000 UTC m=+1234.862385788" watchObservedRunningTime="2026-01-31 05:59:28.772267871 +0000 UTC m=+1234.866149712" Jan 31 05:59:28 crc kubenswrapper[4712]: I0131 05:59:28.886408 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:28 crc kubenswrapper[4712]: E0131 
05:59:28.886619 4712 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 31 05:59:28 crc kubenswrapper[4712]: E0131 05:59:28.886655 4712 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 31 05:59:28 crc kubenswrapper[4712]: E0131 05:59:28.886726 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift podName:526325aa-f517-45ab-b0d3-b7285ef8db7b nodeName:}" failed. No retries permitted until 2026-01-31 05:59:30.886703721 +0000 UTC m=+1236.980585562 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift") pod "swift-storage-0" (UID: "526325aa-f517-45ab-b0d3-b7285ef8db7b") : configmap "swift-ring-files" not found Jan 31 05:59:30 crc kubenswrapper[4712]: I0131 05:59:30.923338 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:30 crc kubenswrapper[4712]: E0131 05:59:30.923938 4712 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 31 05:59:30 crc kubenswrapper[4712]: E0131 05:59:30.925210 4712 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 31 05:59:30 crc kubenswrapper[4712]: E0131 05:59:30.925265 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift podName:526325aa-f517-45ab-b0d3-b7285ef8db7b nodeName:}" failed. No retries permitted until 2026-01-31 05:59:34.925247773 +0000 UTC m=+1241.019129614 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift") pod "swift-storage-0" (UID: "526325aa-f517-45ab-b0d3-b7285ef8db7b") : configmap "swift-ring-files" not found Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.078460 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.353874 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.354324 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.447065 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-rsj7n"] Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.448647 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.451401 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.454844 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rsj7n"] Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.558406 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb56x\" (UniqueName: \"kubernetes.io/projected/978b4d4f-9f52-4ffe-95ad-159e6cd39062-kube-api-access-gb56x\") pod \"root-account-create-update-rsj7n\" (UID: \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\") " pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.558475 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/978b4d4f-9f52-4ffe-95ad-159e6cd39062-operator-scripts\") pod \"root-account-create-update-rsj7n\" (UID: \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\") " pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.660605 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb56x\" (UniqueName: \"kubernetes.io/projected/978b4d4f-9f52-4ffe-95ad-159e6cd39062-kube-api-access-gb56x\") pod \"root-account-create-update-rsj7n\" (UID: \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\") " pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.660705 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/978b4d4f-9f52-4ffe-95ad-159e6cd39062-operator-scripts\") pod \"root-account-create-update-rsj7n\" (UID: \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\") " pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.663690 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/978b4d4f-9f52-4ffe-95ad-159e6cd39062-operator-scripts\") pod \"root-account-create-update-rsj7n\" (UID: \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\") " pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.683041 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb56x\" (UniqueName: \"kubernetes.io/projected/978b4d4f-9f52-4ffe-95ad-159e6cd39062-kube-api-access-gb56x\") pod \"root-account-create-update-rsj7n\" (UID: \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\") " pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:32 crc kubenswrapper[4712]: I0131 05:59:32.773122 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:33 crc kubenswrapper[4712]: I0131 05:59:33.196859 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 31 05:59:33 crc kubenswrapper[4712]: I0131 05:59:33.860494 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.073182 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-c9pcv"] Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.074306 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-c9pcv" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.084358 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-c9pcv"] Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.163597 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-5785-account-create-update-bkvrb"] Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.164976 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5785-account-create-update-bkvrb" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.168554 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.186538 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5785-account-create-update-bkvrb"] Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.200049 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84dacb4f-7fb4-47fb-a405-5550d56c54c6-operator-scripts\") pod \"glance-db-create-c9pcv\" (UID: \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\") " pod="openstack/glance-db-create-c9pcv" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.200108 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4pjt\" (UniqueName: \"kubernetes.io/projected/84dacb4f-7fb4-47fb-a405-5550d56c54c6-kube-api-access-f4pjt\") pod \"glance-db-create-c9pcv\" (UID: \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\") " pod="openstack/glance-db-create-c9pcv" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.302369 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fef33d31-f3fd-4d92-8510-520b0d7517de-operator-scripts\") pod \"glance-5785-account-create-update-bkvrb\" (UID: \"fef33d31-f3fd-4d92-8510-520b0d7517de\") " pod="openstack/glance-5785-account-create-update-bkvrb" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.302452 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhprk\" (UniqueName: \"kubernetes.io/projected/fef33d31-f3fd-4d92-8510-520b0d7517de-kube-api-access-fhprk\") pod \"glance-5785-account-create-update-bkvrb\" (UID: \"fef33d31-f3fd-4d92-8510-520b0d7517de\") " pod="openstack/glance-5785-account-create-update-bkvrb" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.302544 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/84dacb4f-7fb4-47fb-a405-5550d56c54c6-operator-scripts\") pod \"glance-db-create-c9pcv\" (UID: \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\") " pod="openstack/glance-db-create-c9pcv" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.302578 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4pjt\" (UniqueName: \"kubernetes.io/projected/84dacb4f-7fb4-47fb-a405-5550d56c54c6-kube-api-access-f4pjt\") pod \"glance-db-create-c9pcv\" (UID: \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\") " pod="openstack/glance-db-create-c9pcv" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.303410 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84dacb4f-7fb4-47fb-a405-5550d56c54c6-operator-scripts\") pod \"glance-db-create-c9pcv\" (UID: \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\") " pod="openstack/glance-db-create-c9pcv" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.335726 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4pjt\" (UniqueName: \"kubernetes.io/projected/84dacb4f-7fb4-47fb-a405-5550d56c54c6-kube-api-access-f4pjt\") pod \"glance-db-create-c9pcv\" (UID: \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\") " pod="openstack/glance-db-create-c9pcv" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.398469 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-c9pcv" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.404411 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fef33d31-f3fd-4d92-8510-520b0d7517de-operator-scripts\") pod \"glance-5785-account-create-update-bkvrb\" (UID: \"fef33d31-f3fd-4d92-8510-520b0d7517de\") " pod="openstack/glance-5785-account-create-update-bkvrb" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.404532 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhprk\" (UniqueName: \"kubernetes.io/projected/fef33d31-f3fd-4d92-8510-520b0d7517de-kube-api-access-fhprk\") pod \"glance-5785-account-create-update-bkvrb\" (UID: \"fef33d31-f3fd-4d92-8510-520b0d7517de\") " pod="openstack/glance-5785-account-create-update-bkvrb" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.405111 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fef33d31-f3fd-4d92-8510-520b0d7517de-operator-scripts\") pod \"glance-5785-account-create-update-bkvrb\" (UID: \"fef33d31-f3fd-4d92-8510-520b0d7517de\") " pod="openstack/glance-5785-account-create-update-bkvrb" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.423496 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhprk\" (UniqueName: \"kubernetes.io/projected/fef33d31-f3fd-4d92-8510-520b0d7517de-kube-api-access-fhprk\") pod \"glance-5785-account-create-update-bkvrb\" (UID: \"fef33d31-f3fd-4d92-8510-520b0d7517de\") " pod="openstack/glance-5785-account-create-update-bkvrb" Jan 31 05:59:34 crc kubenswrapper[4712]: I0131 05:59:34.489826 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5785-account-create-update-bkvrb" Jan 31 05:59:35 crc kubenswrapper[4712]: I0131 05:59:35.019092 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:35 crc kubenswrapper[4712]: E0131 05:59:35.019301 4712 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 31 05:59:35 crc kubenswrapper[4712]: E0131 05:59:35.019320 4712 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 31 05:59:35 crc kubenswrapper[4712]: E0131 05:59:35.019379 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift podName:526325aa-f517-45ab-b0d3-b7285ef8db7b nodeName:}" failed. No retries permitted until 2026-01-31 05:59:43.019361316 +0000 UTC m=+1249.113243157 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift") pod "swift-storage-0" (UID: "526325aa-f517-45ab-b0d3-b7285ef8db7b") : configmap "swift-ring-files" not found Jan 31 05:59:36 crc kubenswrapper[4712]: I0131 05:59:36.301417 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" Jan 31 05:59:36 crc kubenswrapper[4712]: I0131 05:59:36.365450 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b8496b6f5-h889x"] Jan 31 05:59:36 crc kubenswrapper[4712]: I0131 05:59:36.365754 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" podUID="aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" containerName="dnsmasq-dns" containerID="cri-o://90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af" gracePeriod=10 Jan 31 05:59:36 crc kubenswrapper[4712]: I0131 05:59:36.806276 4712 generic.go:334] "Generic (PLEG): container finished" podID="c6bc2bf6-037a-4415-9e9a-fdae0ef54662" containerID="cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7" exitCode=0 Jan 31 05:59:36 crc kubenswrapper[4712]: I0131 05:59:36.806345 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6bc2bf6-037a-4415-9e9a-fdae0ef54662","Type":"ContainerDied","Data":"cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7"} Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.497514 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.591594 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-dns-svc\") pod \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.591722 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-sb\") pod \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.591832 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-nb\") pod \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.591910 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-config\") pod \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.591954 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hpf5\" (UniqueName: \"kubernetes.io/projected/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-kube-api-access-6hpf5\") pod \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\" (UID: \"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0\") " Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.597711 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-kube-api-access-6hpf5" (OuterVolumeSpecName: "kube-api-access-6hpf5") pod "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" (UID: "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0"). InnerVolumeSpecName "kube-api-access-6hpf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.638780 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" (UID: "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.651903 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" (UID: "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.668399 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" (UID: "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.668819 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-config" (OuterVolumeSpecName: "config") pod "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" (UID: "aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.694251 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-config\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.694288 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hpf5\" (UniqueName: \"kubernetes.io/projected/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-kube-api-access-6hpf5\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.694300 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.694311 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.694322 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.720811 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5785-account-create-update-bkvrb"] Jan 31 05:59:37 crc kubenswrapper[4712]: W0131 05:59:37.721014 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfef33d31_f3fd_4d92_8510_520b0d7517de.slice/crio-9096384acc5797edae5108cff52cd0d6a645d530cea9a18c15a28bf861b86e1c WatchSource:0}: Error finding container 9096384acc5797edae5108cff52cd0d6a645d530cea9a18c15a28bf861b86e1c: Status 404 returned error can't find the container with id 9096384acc5797edae5108cff52cd0d6a645d530cea9a18c15a28bf861b86e1c Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.733750 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-c9pcv"] Jan 31 05:59:37 crc kubenswrapper[4712]: W0131 05:59:37.737672 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84dacb4f_7fb4_47fb_a405_5550d56c54c6.slice/crio-4e4754f9fa5ec7c057cc874d28f997b1fc2643825641d422e7abbdaa18baba23 WatchSource:0}: Error finding container 4e4754f9fa5ec7c057cc874d28f997b1fc2643825641d422e7abbdaa18baba23: Status 404 returned error can't find the container with id 4e4754f9fa5ec7c057cc874d28f997b1fc2643825641d422e7abbdaa18baba23 Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.743010 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rsj7n"] Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.819217 4712 generic.go:334] "Generic (PLEG): container finished" podID="aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" 
containerID="90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af" exitCode=0 Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.819297 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" event={"ID":"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0","Type":"ContainerDied","Data":"90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af"} Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.819740 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" event={"ID":"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0","Type":"ContainerDied","Data":"32e2109da768813f3e2198932468e90f349e339df05440990c735e2b9ab2e673"} Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.819763 4712 scope.go:117] "RemoveContainer" containerID="90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.819314 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.822380 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-xnfzc" event={"ID":"108e0e4f-9137-4e8c-aec6-032c1585852c","Type":"ContainerStarted","Data":"29c42a0450a9e6a4b028dfdbc2b19ab44b7fe563e31fb0653cc71924ed96d18b"} Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.824457 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5785-account-create-update-bkvrb" event={"ID":"fef33d31-f3fd-4d92-8510-520b0d7517de","Type":"ContainerStarted","Data":"9096384acc5797edae5108cff52cd0d6a645d530cea9a18c15a28bf861b86e1c"} Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.828971 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-c9pcv" event={"ID":"84dacb4f-7fb4-47fb-a405-5550d56c54c6","Type":"ContainerStarted","Data":"4e4754f9fa5ec7c057cc874d28f997b1fc2643825641d422e7abbdaa18baba23"} Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.833747 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6bc2bf6-037a-4415-9e9a-fdae0ef54662","Type":"ContainerStarted","Data":"097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22"} Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.834015 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.836611 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rsj7n" event={"ID":"978b4d4f-9f52-4ffe-95ad-159e6cd39062","Type":"ContainerStarted","Data":"a377507bc600b540a533bedaf8db2942e194f16402c1c33d64a1a3eab09b3c1b"} Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.851654 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-xnfzc" podStartSLOduration=2.029269077 podStartE2EDuration="10.851631677s" podCreationTimestamp="2026-01-31 05:59:27 +0000 UTC" firstStartedPulling="2026-01-31 05:59:28.395393751 +0000 UTC m=+1234.489275592" lastFinishedPulling="2026-01-31 05:59:37.217756351 +0000 UTC m=+1243.311638192" observedRunningTime="2026-01-31 05:59:37.848010527 +0000 UTC m=+1243.941892368" watchObservedRunningTime="2026-01-31 05:59:37.851631677 +0000 UTC m=+1243.945513518" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.866637 4712 scope.go:117] "RemoveContainer" 
containerID="1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.881393 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=41.936447756 podStartE2EDuration="1m8.881372502s" podCreationTimestamp="2026-01-31 05:58:29 +0000 UTC" firstStartedPulling="2026-01-31 05:58:32.126237874 +0000 UTC m=+1178.220119715" lastFinishedPulling="2026-01-31 05:58:59.07116262 +0000 UTC m=+1205.165044461" observedRunningTime="2026-01-31 05:59:37.87723741 +0000 UTC m=+1243.971119261" watchObservedRunningTime="2026-01-31 05:59:37.881372502 +0000 UTC m=+1243.975254343" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.898749 4712 scope.go:117] "RemoveContainer" containerID="90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af" Jan 31 05:59:37 crc kubenswrapper[4712]: E0131 05:59:37.900238 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af\": container with ID starting with 90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af not found: ID does not exist" containerID="90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.900276 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af"} err="failed to get container status \"90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af\": rpc error: code = NotFound desc = could not find container \"90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af\": container with ID starting with 90202a679ef4d34a544dc096175991f48b82b5ad8bddc70240b7294a4df8c9af not found: ID does not exist" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.900305 4712 scope.go:117] "RemoveContainer" containerID="1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.900386 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b8496b6f5-h889x"] Jan 31 05:59:37 crc kubenswrapper[4712]: E0131 05:59:37.900926 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef\": container with ID starting with 1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef not found: ID does not exist" containerID="1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.900995 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef"} err="failed to get container status \"1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef\": rpc error: code = NotFound desc = could not find container \"1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef\": container with ID starting with 1e287f0581c457426b9252cec56034638de404e9d7adc6bb4c614ad1fe78b8ef not found: ID does not exist" Jan 31 05:59:37 crc kubenswrapper[4712]: I0131 05:59:37.906186 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b8496b6f5-h889x"] Jan 31 05:59:38 crc kubenswrapper[4712]: I0131 
Jan 31 05:59:38 crc kubenswrapper[4712]: I0131 05:59:38.844189 4712 generic.go:334] "Generic (PLEG): container finished" podID="fef33d31-f3fd-4d92-8510-520b0d7517de" containerID="18d8bf83d8cfdc0f92ec0529fa02b2b9e5c064dc7daf588f5ac3c586cb2862ad" exitCode=0
Jan 31 05:59:38 crc kubenswrapper[4712]: I0131 05:59:38.844409 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5785-account-create-update-bkvrb" event={"ID":"fef33d31-f3fd-4d92-8510-520b0d7517de","Type":"ContainerDied","Data":"18d8bf83d8cfdc0f92ec0529fa02b2b9e5c064dc7daf588f5ac3c586cb2862ad"}
Jan 31 05:59:38 crc kubenswrapper[4712]: I0131 05:59:38.846133 4712 generic.go:334] "Generic (PLEG): container finished" podID="84dacb4f-7fb4-47fb-a405-5550d56c54c6" containerID="b92aee5d221fdcd20dc0d76dd0ceb4994e8f12904ac4c6ccbdbcd586c7db175f" exitCode=0
Jan 31 05:59:38 crc kubenswrapper[4712]: I0131 05:59:38.846221 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-c9pcv" event={"ID":"84dacb4f-7fb4-47fb-a405-5550d56c54c6","Type":"ContainerDied","Data":"b92aee5d221fdcd20dc0d76dd0ceb4994e8f12904ac4c6ccbdbcd586c7db175f"}
Jan 31 05:59:38 crc kubenswrapper[4712]: I0131 05:59:38.848037 4712 generic.go:334] "Generic (PLEG): container finished" podID="978b4d4f-9f52-4ffe-95ad-159e6cd39062" containerID="4f6934af4111a29ecd434319d69bb3c8c1dbdff0c730c950b5d033cca184deff" exitCode=0
Jan 31 05:59:38 crc kubenswrapper[4712]: I0131 05:59:38.848132 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rsj7n" event={"ID":"978b4d4f-9f52-4ffe-95ad-159e6cd39062","Type":"ContainerDied","Data":"4f6934af4111a29ecd434319d69bb3c8c1dbdff0c730c950b5d033cca184deff"}
Jan 31 05:59:39 crc kubenswrapper[4712]: I0131 05:59:39.654547 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-xbh95" podUID="16fc3ee9-9ecc-45b7-8410-d9a6b2da5863" containerName="ovn-controller" probeResult="failure" output=<
Jan 31 05:59:39 crc kubenswrapper[4712]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Jan 31 05:59:39 crc kubenswrapper[4712]: >
Jan 31 05:59:39 crc kubenswrapper[4712]: I0131 05:59:39.860186 4712 generic.go:334] "Generic (PLEG): container finished" podID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" containerID="01388726fb02d90b2eb560005e90cf645b9e2dd51d9efcd0b3815fa84335cf2c" exitCode=0
Jan 31 05:59:39 crc kubenswrapper[4712]: I0131 05:59:39.860276 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e8488eab-54d1-405d-aa15-6f7f9a50b6a8","Type":"ContainerDied","Data":"01388726fb02d90b2eb560005e90cf645b9e2dd51d9efcd0b3815fa84335cf2c"}
Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.289901 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-c9pcv"
Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.362695 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5785-account-create-update-bkvrb"
Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.384063 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rsj7n"
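The ovn-controller readiness failure above shows how the prober logs multi-line probe output between '<' and '>'. A rough sketch of an exec-style check whose combined output is kept for the log line, assuming this probe is exec-based; the script path is invented for illustration:

    package main

    import (
    	"fmt"
    	"os/exec"
    )

    func main() {
    	// hypothetical check command standing in for the pod's probe script
    	out, err := exec.Command("/usr/local/bin/ovn-readiness-check.sh").CombinedOutput()
    	if err != nil {
    		// a non-zero exit marks the probe failed; the captured output is
    		// printed between < and >, as in the log lines above
    		fmt.Printf("\"Probe failed\" probeType=%q probeResult=%q output=<\n%s>\n",
    			"Readiness", "failure", out)
    	}
    }
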
Need to start a new one" pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.446761 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhprk\" (UniqueName: \"kubernetes.io/projected/fef33d31-f3fd-4d92-8510-520b0d7517de-kube-api-access-fhprk\") pod \"fef33d31-f3fd-4d92-8510-520b0d7517de\" (UID: \"fef33d31-f3fd-4d92-8510-520b0d7517de\") " Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.446947 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4pjt\" (UniqueName: \"kubernetes.io/projected/84dacb4f-7fb4-47fb-a405-5550d56c54c6-kube-api-access-f4pjt\") pod \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\" (UID: \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\") " Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.446996 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fef33d31-f3fd-4d92-8510-520b0d7517de-operator-scripts\") pod \"fef33d31-f3fd-4d92-8510-520b0d7517de\" (UID: \"fef33d31-f3fd-4d92-8510-520b0d7517de\") " Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.447027 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84dacb4f-7fb4-47fb-a405-5550d56c54c6-operator-scripts\") pod \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\" (UID: \"84dacb4f-7fb4-47fb-a405-5550d56c54c6\") " Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.449814 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fef33d31-f3fd-4d92-8510-520b0d7517de-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fef33d31-f3fd-4d92-8510-520b0d7517de" (UID: "fef33d31-f3fd-4d92-8510-520b0d7517de"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.450044 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84dacb4f-7fb4-47fb-a405-5550d56c54c6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "84dacb4f-7fb4-47fb-a405-5550d56c54c6" (UID: "84dacb4f-7fb4-47fb-a405-5550d56c54c6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.454942 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fef33d31-f3fd-4d92-8510-520b0d7517de-kube-api-access-fhprk" (OuterVolumeSpecName: "kube-api-access-fhprk") pod "fef33d31-f3fd-4d92-8510-520b0d7517de" (UID: "fef33d31-f3fd-4d92-8510-520b0d7517de"). InnerVolumeSpecName "kube-api-access-fhprk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.455550 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84dacb4f-7fb4-47fb-a405-5550d56c54c6-kube-api-access-f4pjt" (OuterVolumeSpecName: "kube-api-access-f4pjt") pod "84dacb4f-7fb4-47fb-a405-5550d56c54c6" (UID: "84dacb4f-7fb4-47fb-a405-5550d56c54c6"). InnerVolumeSpecName "kube-api-access-f4pjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.551995 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb56x\" (UniqueName: \"kubernetes.io/projected/978b4d4f-9f52-4ffe-95ad-159e6cd39062-kube-api-access-gb56x\") pod \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\" (UID: \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\") " Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.552524 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/978b4d4f-9f52-4ffe-95ad-159e6cd39062-operator-scripts\") pod \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\" (UID: \"978b4d4f-9f52-4ffe-95ad-159e6cd39062\") " Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.553549 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/978b4d4f-9f52-4ffe-95ad-159e6cd39062-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "978b4d4f-9f52-4ffe-95ad-159e6cd39062" (UID: "978b4d4f-9f52-4ffe-95ad-159e6cd39062"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.554738 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhprk\" (UniqueName: \"kubernetes.io/projected/fef33d31-f3fd-4d92-8510-520b0d7517de-kube-api-access-fhprk\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.554765 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4pjt\" (UniqueName: \"kubernetes.io/projected/84dacb4f-7fb4-47fb-a405-5550d56c54c6-kube-api-access-f4pjt\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.554780 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fef33d31-f3fd-4d92-8510-520b0d7517de-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.554792 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84dacb4f-7fb4-47fb-a405-5550d56c54c6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.554804 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/978b4d4f-9f52-4ffe-95ad-159e6cd39062-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.558371 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/978b4d4f-9f52-4ffe-95ad-159e6cd39062-kube-api-access-gb56x" (OuterVolumeSpecName: "kube-api-access-gb56x") pod "978b4d4f-9f52-4ffe-95ad-159e6cd39062" (UID: "978b4d4f-9f52-4ffe-95ad-159e6cd39062"). InnerVolumeSpecName "kube-api-access-gb56x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.657726 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb56x\" (UniqueName: \"kubernetes.io/projected/978b4d4f-9f52-4ffe-95ad-159e6cd39062-kube-api-access-gb56x\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.869064 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rsj7n" event={"ID":"978b4d4f-9f52-4ffe-95ad-159e6cd39062","Type":"ContainerDied","Data":"a377507bc600b540a533bedaf8db2942e194f16402c1c33d64a1a3eab09b3c1b"} Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.869116 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a377507bc600b540a533bedaf8db2942e194f16402c1c33d64a1a3eab09b3c1b" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.869208 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rsj7n" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.878418 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e8488eab-54d1-405d-aa15-6f7f9a50b6a8","Type":"ContainerStarted","Data":"7fbb1c5aef134e7040b0b7f309f2f23106e23f200a2d5fe27734ee1961e8e9f7"} Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.879423 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.880885 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5785-account-create-update-bkvrb" event={"ID":"fef33d31-f3fd-4d92-8510-520b0d7517de","Type":"ContainerDied","Data":"9096384acc5797edae5108cff52cd0d6a645d530cea9a18c15a28bf861b86e1c"} Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.880910 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9096384acc5797edae5108cff52cd0d6a645d530cea9a18c15a28bf861b86e1c" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.880952 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5785-account-create-update-bkvrb" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.882536 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-c9pcv" event={"ID":"84dacb4f-7fb4-47fb-a405-5550d56c54c6","Type":"ContainerDied","Data":"4e4754f9fa5ec7c057cc874d28f997b1fc2643825641d422e7abbdaa18baba23"} Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.882565 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e4754f9fa5ec7c057cc874d28f997b1fc2643825641d422e7abbdaa18baba23" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.882606 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-c9pcv" Jan 31 05:59:40 crc kubenswrapper[4712]: I0131 05:59:40.906151 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=41.24114145 podStartE2EDuration="1m10.906130521s" podCreationTimestamp="2026-01-31 05:58:30 +0000 UTC" firstStartedPulling="2026-01-31 05:58:32.056990161 +0000 UTC m=+1178.150872002" lastFinishedPulling="2026-01-31 05:59:01.721979232 +0000 UTC m=+1207.815861073" observedRunningTime="2026-01-31 05:59:40.903499926 +0000 UTC m=+1246.997381777" watchObservedRunningTime="2026-01-31 05:59:40.906130521 +0000 UTC m=+1247.000012362" Jan 31 05:59:42 crc kubenswrapper[4712]: I0131 05:59:42.339161 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b8496b6f5-h889x" podUID="aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: i/o timeout" Jan 31 05:59:42 crc kubenswrapper[4712]: I0131 05:59:42.497649 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 05:59:42 crc kubenswrapper[4712]: I0131 05:59:42.497715 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.102728 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:43 crc kubenswrapper[4712]: E0131 05:59:43.102987 4712 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 31 05:59:43 crc kubenswrapper[4712]: E0131 05:59:43.103034 4712 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 31 05:59:43 crc kubenswrapper[4712]: E0131 05:59:43.103091 4712 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift podName:526325aa-f517-45ab-b0d3-b7285ef8db7b nodeName:}" failed. No retries permitted until 2026-01-31 05:59:59.10307026 +0000 UTC m=+1265.196952101 (durationBeforeRetry 16s). 
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.538432 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-b4ph9"]
Jan 31 05:59:43 crc kubenswrapper[4712]: E0131 05:59:43.539924 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" containerName="dnsmasq-dns"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.539975 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" containerName="dnsmasq-dns"
Jan 31 05:59:43 crc kubenswrapper[4712]: E0131 05:59:43.540034 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef33d31-f3fd-4d92-8510-520b0d7517de" containerName="mariadb-account-create-update"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.540045 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef33d31-f3fd-4d92-8510-520b0d7517de" containerName="mariadb-account-create-update"
Jan 31 05:59:43 crc kubenswrapper[4712]: E0131 05:59:43.540062 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" containerName="init"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.540072 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" containerName="init"
Jan 31 05:59:43 crc kubenswrapper[4712]: E0131 05:59:43.540095 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="978b4d4f-9f52-4ffe-95ad-159e6cd39062" containerName="mariadb-account-create-update"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.540105 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="978b4d4f-9f52-4ffe-95ad-159e6cd39062" containerName="mariadb-account-create-update"
Jan 31 05:59:43 crc kubenswrapper[4712]: E0131 05:59:43.540129 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84dacb4f-7fb4-47fb-a405-5550d56c54c6" containerName="mariadb-database-create"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.540140 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="84dacb4f-7fb4-47fb-a405-5550d56c54c6" containerName="mariadb-database-create"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.540408 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0" containerName="dnsmasq-dns"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.540439 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="fef33d31-f3fd-4d92-8510-520b0d7517de" containerName="mariadb-account-create-update"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.540458 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="978b4d4f-9f52-4ffe-95ad-159e6cd39062" containerName="mariadb-account-create-update"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.540470 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="84dacb4f-7fb4-47fb-a405-5550d56c54c6" containerName="mariadb-database-create"
Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.541154 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b4ph9"
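Before admitting keystone-db-create-b4ph9, the CPU and memory managers above drop pinning records for containers of pods that no longer exist (the dnsmasq and mariadb job pods deleted earlier). A schematic sketch of that stale-state cleanup; the map layout is illustrative, not the kubelet's state-file format:

    package main

    import "fmt"

    type key struct{ podUID, container string }

    func main() {
    	// recorded assignments, keyed by pod UID and container name
    	assignments := map[key]string{
    		{"aa6ae8de-5532-4ddf-bc62-92c38aa7bfa0", "dnsmasq-dns"}: "cpus 2-3",
    	}
    	active := map[string]bool{} // the dnsmasq pod was deleted above

    	for k := range assignments {
    		if !active[k.podUID] {
    			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
    				k.podUID, k.container)
    			delete(assignments, k) // safe to delete while ranging in Go
    		}
    	}
    }
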
Need to start a new one" pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.555510 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-b4ph9"] Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.565085 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-8e51-account-create-update-psnbh"] Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.566143 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.575684 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.596625 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8e51-account-create-update-psnbh"] Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.713849 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80b022b2-f7c3-489e-832a-116b9d4edbf7-operator-scripts\") pod \"keystone-db-create-b4ph9\" (UID: \"80b022b2-f7c3-489e-832a-116b9d4edbf7\") " pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.713965 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pghsb\" (UniqueName: \"kubernetes.io/projected/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-kube-api-access-pghsb\") pod \"keystone-8e51-account-create-update-psnbh\" (UID: \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\") " pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.714045 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44j75\" (UniqueName: \"kubernetes.io/projected/80b022b2-f7c3-489e-832a-116b9d4edbf7-kube-api-access-44j75\") pod \"keystone-db-create-b4ph9\" (UID: \"80b022b2-f7c3-489e-832a-116b9d4edbf7\") " pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.714157 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-operator-scripts\") pod \"keystone-8e51-account-create-update-psnbh\" (UID: \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\") " pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.817907 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44j75\" (UniqueName: \"kubernetes.io/projected/80b022b2-f7c3-489e-832a-116b9d4edbf7-kube-api-access-44j75\") pod \"keystone-db-create-b4ph9\" (UID: \"80b022b2-f7c3-489e-832a-116b9d4edbf7\") " pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.818129 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-operator-scripts\") pod \"keystone-8e51-account-create-update-psnbh\" (UID: \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\") " pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.818219 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80b022b2-f7c3-489e-832a-116b9d4edbf7-operator-scripts\") pod \"keystone-db-create-b4ph9\" (UID: \"80b022b2-f7c3-489e-832a-116b9d4edbf7\") " pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.818267 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pghsb\" (UniqueName: \"kubernetes.io/projected/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-kube-api-access-pghsb\") pod \"keystone-8e51-account-create-update-psnbh\" (UID: \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\") " pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.820090 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80b022b2-f7c3-489e-832a-116b9d4edbf7-operator-scripts\") pod \"keystone-db-create-b4ph9\" (UID: \"80b022b2-f7c3-489e-832a-116b9d4edbf7\") " pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.820249 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-operator-scripts\") pod \"keystone-8e51-account-create-update-psnbh\" (UID: \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\") " pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.851425 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-nkvdt"] Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.856855 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.861790 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44j75\" (UniqueName: \"kubernetes.io/projected/80b022b2-f7c3-489e-832a-116b9d4edbf7-kube-api-access-44j75\") pod \"keystone-db-create-b4ph9\" (UID: \"80b022b2-f7c3-489e-832a-116b9d4edbf7\") " pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.861864 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pghsb\" (UniqueName: \"kubernetes.io/projected/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-kube-api-access-pghsb\") pod \"keystone-8e51-account-create-update-psnbh\" (UID: \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\") " pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.866522 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-nkvdt"] Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.889093 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.944912 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8a1d-account-create-update-9mm2b"] Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.948654 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.954081 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 31 05:59:43 crc kubenswrapper[4712]: I0131 05:59:43.954822 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8a1d-account-create-update-9mm2b"] Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.028414 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2n9t\" (UniqueName: \"kubernetes.io/projected/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-kube-api-access-m2n9t\") pod \"placement-db-create-nkvdt\" (UID: \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\") " pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.028726 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-operator-scripts\") pod \"placement-db-create-nkvdt\" (UID: \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\") " pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.130304 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b036491-3b39-44db-b9c8-f1ab0fc6034e-operator-scripts\") pod \"placement-8a1d-account-create-update-9mm2b\" (UID: \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\") " pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.130397 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-operator-scripts\") pod \"placement-db-create-nkvdt\" (UID: \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\") " pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.130489 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld7w6\" (UniqueName: \"kubernetes.io/projected/6b036491-3b39-44db-b9c8-f1ab0fc6034e-kube-api-access-ld7w6\") pod \"placement-8a1d-account-create-update-9mm2b\" (UID: \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\") " pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.130521 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2n9t\" (UniqueName: \"kubernetes.io/projected/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-kube-api-access-m2n9t\") pod \"placement-db-create-nkvdt\" (UID: \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\") " pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.133612 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-operator-scripts\") pod \"placement-db-create-nkvdt\" (UID: \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\") " pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.156780 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2n9t\" (UniqueName: 
\"kubernetes.io/projected/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-kube-api-access-m2n9t\") pod \"placement-db-create-nkvdt\" (UID: \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\") " pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.159075 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.231587 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b036491-3b39-44db-b9c8-f1ab0fc6034e-operator-scripts\") pod \"placement-8a1d-account-create-update-9mm2b\" (UID: \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\") " pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.232100 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld7w6\" (UniqueName: \"kubernetes.io/projected/6b036491-3b39-44db-b9c8-f1ab0fc6034e-kube-api-access-ld7w6\") pod \"placement-8a1d-account-create-update-9mm2b\" (UID: \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\") " pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.234463 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b036491-3b39-44db-b9c8-f1ab0fc6034e-operator-scripts\") pod \"placement-8a1d-account-create-update-9mm2b\" (UID: \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\") " pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.258677 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld7w6\" (UniqueName: \"kubernetes.io/projected/6b036491-3b39-44db-b9c8-f1ab0fc6034e-kube-api-access-ld7w6\") pod \"placement-8a1d-account-create-update-9mm2b\" (UID: \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\") " pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.332044 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.343967 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.374351 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-qp52f"] Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.375510 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.381012 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hlkbc" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.381360 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.391956 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-qp52f"] Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.441811 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8e51-account-create-update-psnbh"] Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.536717 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-combined-ca-bundle\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.536780 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-db-sync-config-data\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.536809 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-config-data\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.536914 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l56vp\" (UniqueName: \"kubernetes.io/projected/a59772f8-fb2b-4ccd-80e3-de90890503d9-kube-api-access-l56vp\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.638468 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-db-sync-config-data\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.640205 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-config-data\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.640465 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l56vp\" (UniqueName: \"kubernetes.io/projected/a59772f8-fb2b-4ccd-80e3-de90890503d9-kube-api-access-l56vp\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.640559 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-combined-ca-bundle\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.649774 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-db-sync-config-data\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.649861 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-config-data\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.650428 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-combined-ca-bundle\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.659199 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l56vp\" (UniqueName: \"kubernetes.io/projected/a59772f8-fb2b-4ccd-80e3-de90890503d9-kube-api-access-l56vp\") pod \"glance-db-sync-qp52f\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.684912 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-xbh95" podUID="16fc3ee9-9ecc-45b7-8410-d9a6b2da5863" containerName="ovn-controller" probeResult="failure" output=< Jan 31 05:59:44 crc kubenswrapper[4712]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 31 05:59:44 crc kubenswrapper[4712]: > Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.709005 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-b4ph9"] Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.734739 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-qp52f" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.853101 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-nkvdt"] Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.930098 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8e51-account-create-update-psnbh" event={"ID":"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c","Type":"ContainerStarted","Data":"1e763a437a98264799baa28a2d2be454f1da0e8c5b139ee5d3fec05fd6a2e138"} Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.930251 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8e51-account-create-update-psnbh" event={"ID":"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c","Type":"ContainerStarted","Data":"5906ff1f03b675c863ec3294aeb6e6733bbabf5c8e07ec4729c1b1fc25ef5b44"} Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.938124 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b4ph9" event={"ID":"80b022b2-f7c3-489e-832a-116b9d4edbf7","Type":"ContainerStarted","Data":"40cb4bc04c82a7fc9e2d4f5f8cd6482a65a37c5d8c9afd580242c932bc276d4e"} Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.938220 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b4ph9" event={"ID":"80b022b2-f7c3-489e-832a-116b9d4edbf7","Type":"ContainerStarted","Data":"02bd809109c7d9ebf750e313d57fb335ef0ce54ce2b82dd8c0df94b17ba6e76f"} Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.944853 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nkvdt" event={"ID":"1b0cb952-b4c3-4cb2-b420-72d735bf02f0","Type":"ContainerStarted","Data":"55c66803b6256b330a27badc3bc24cd2328f733c069c2a00fdb024f37649d22c"} Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.955149 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-8e51-account-create-update-psnbh" podStartSLOduration=1.95512854 podStartE2EDuration="1.95512854s" podCreationTimestamp="2026-01-31 05:59:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:59:44.952828512 +0000 UTC m=+1251.046710353" watchObservedRunningTime="2026-01-31 05:59:44.95512854 +0000 UTC m=+1251.049010371" Jan 31 05:59:44 crc kubenswrapper[4712]: I0131 05:59:44.998845 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-b4ph9" podStartSLOduration=1.99881295 podStartE2EDuration="1.99881295s" podCreationTimestamp="2026-01-31 05:59:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:59:44.990843913 +0000 UTC m=+1251.084725754" watchObservedRunningTime="2026-01-31 05:59:44.99881295 +0000 UTC m=+1251.092694791" Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.036480 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8a1d-account-create-update-9mm2b"] Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.449863 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-qp52f"] Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.933386 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-rsj7n"] Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.945911 4712 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/root-account-create-update-rsj7n"] Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.971922 4712 generic.go:334] "Generic (PLEG): container finished" podID="1b0cb952-b4c3-4cb2-b420-72d735bf02f0" containerID="94a6b9d9c3e02ef9bc54f44658a31997f8d9641e3979d1fa57594dff9c223dcd" exitCode=0 Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.972046 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nkvdt" event={"ID":"1b0cb952-b4c3-4cb2-b420-72d735bf02f0","Type":"ContainerDied","Data":"94a6b9d9c3e02ef9bc54f44658a31997f8d9641e3979d1fa57594dff9c223dcd"} Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.976501 4712 generic.go:334] "Generic (PLEG): container finished" podID="6b036491-3b39-44db-b9c8-f1ab0fc6034e" containerID="bbb1a0f36ad3d917b5eaf0a7c8df3030210841050bff90066b61814c48b0e3b5" exitCode=0 Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.976630 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8a1d-account-create-update-9mm2b" event={"ID":"6b036491-3b39-44db-b9c8-f1ab0fc6034e","Type":"ContainerDied","Data":"bbb1a0f36ad3d917b5eaf0a7c8df3030210841050bff90066b61814c48b0e3b5"} Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.976667 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8a1d-account-create-update-9mm2b" event={"ID":"6b036491-3b39-44db-b9c8-f1ab0fc6034e","Type":"ContainerStarted","Data":"8540c4c9c784588c78028f826c21016a7d39321375267783c1a80a4b6bbdc7ae"} Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.987485 4712 generic.go:334] "Generic (PLEG): container finished" podID="108e0e4f-9137-4e8c-aec6-032c1585852c" containerID="29c42a0450a9e6a4b028dfdbc2b19ab44b7fe563e31fb0653cc71924ed96d18b" exitCode=0 Jan 31 05:59:45 crc kubenswrapper[4712]: I0131 05:59:45.987590 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-xnfzc" event={"ID":"108e0e4f-9137-4e8c-aec6-032c1585852c","Type":"ContainerDied","Data":"29c42a0450a9e6a4b028dfdbc2b19ab44b7fe563e31fb0653cc71924ed96d18b"} Jan 31 05:59:46 crc kubenswrapper[4712]: I0131 05:59:45.995709 4712 generic.go:334] "Generic (PLEG): container finished" podID="e7e2a723-54a1-42ac-8e8c-3ca125b6d01c" containerID="1e763a437a98264799baa28a2d2be454f1da0e8c5b139ee5d3fec05fd6a2e138" exitCode=0 Jan 31 05:59:46 crc kubenswrapper[4712]: I0131 05:59:45.995810 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8e51-account-create-update-psnbh" event={"ID":"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c","Type":"ContainerDied","Data":"1e763a437a98264799baa28a2d2be454f1da0e8c5b139ee5d3fec05fd6a2e138"} Jan 31 05:59:46 crc kubenswrapper[4712]: I0131 05:59:45.997836 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qp52f" event={"ID":"a59772f8-fb2b-4ccd-80e3-de90890503d9","Type":"ContainerStarted","Data":"0aa195244466d7f3d266efda7f1a0ba81d117707dbe66ad583c50a5a4dfac77b"} Jan 31 05:59:46 crc kubenswrapper[4712]: I0131 05:59:46.000911 4712 generic.go:334] "Generic (PLEG): container finished" podID="80b022b2-f7c3-489e-832a-116b9d4edbf7" containerID="40cb4bc04c82a7fc9e2d4f5f8cd6482a65a37c5d8c9afd580242c932bc276d4e" exitCode=0 Jan 31 05:59:46 crc kubenswrapper[4712]: I0131 05:59:46.000985 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b4ph9" 
event={"ID":"80b022b2-f7c3-489e-832a-116b9d4edbf7","Type":"ContainerDied","Data":"40cb4bc04c82a7fc9e2d4f5f8cd6482a65a37c5d8c9afd580242c932bc276d4e"} Jan 31 05:59:46 crc kubenswrapper[4712]: I0131 05:59:46.516865 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="978b4d4f-9f52-4ffe-95ad-159e6cd39062" path="/var/lib/kubelet/pods/978b4d4f-9f52-4ffe-95ad-159e6cd39062/volumes" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.423558 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.505258 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkhvm\" (UniqueName: \"kubernetes.io/projected/108e0e4f-9137-4e8c-aec6-032c1585852c-kube-api-access-jkhvm\") pod \"108e0e4f-9137-4e8c-aec6-032c1585852c\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.505346 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/108e0e4f-9137-4e8c-aec6-032c1585852c-etc-swift\") pod \"108e0e4f-9137-4e8c-aec6-032c1585852c\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.505468 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-scripts\") pod \"108e0e4f-9137-4e8c-aec6-032c1585852c\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.505509 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-dispersionconf\") pod \"108e0e4f-9137-4e8c-aec6-032c1585852c\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.505543 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-ring-data-devices\") pod \"108e0e4f-9137-4e8c-aec6-032c1585852c\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.505573 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-combined-ca-bundle\") pod \"108e0e4f-9137-4e8c-aec6-032c1585852c\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.505592 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-swiftconf\") pod \"108e0e4f-9137-4e8c-aec6-032c1585852c\" (UID: \"108e0e4f-9137-4e8c-aec6-032c1585852c\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.514040 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "108e0e4f-9137-4e8c-aec6-032c1585852c" (UID: "108e0e4f-9137-4e8c-aec6-032c1585852c"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.515136 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/108e0e4f-9137-4e8c-aec6-032c1585852c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "108e0e4f-9137-4e8c-aec6-032c1585852c" (UID: "108e0e4f-9137-4e8c-aec6-032c1585852c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.525865 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/108e0e4f-9137-4e8c-aec6-032c1585852c-kube-api-access-jkhvm" (OuterVolumeSpecName: "kube-api-access-jkhvm") pod "108e0e4f-9137-4e8c-aec6-032c1585852c" (UID: "108e0e4f-9137-4e8c-aec6-032c1585852c"). InnerVolumeSpecName "kube-api-access-jkhvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.537925 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-scripts" (OuterVolumeSpecName: "scripts") pod "108e0e4f-9137-4e8c-aec6-032c1585852c" (UID: "108e0e4f-9137-4e8c-aec6-032c1585852c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.542602 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "108e0e4f-9137-4e8c-aec6-032c1585852c" (UID: "108e0e4f-9137-4e8c-aec6-032c1585852c"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.559597 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "108e0e4f-9137-4e8c-aec6-032c1585852c" (UID: "108e0e4f-9137-4e8c-aec6-032c1585852c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.584761 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "108e0e4f-9137-4e8c-aec6-032c1585852c" (UID: "108e0e4f-9137-4e8c-aec6-032c1585852c"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.607938 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkhvm\" (UniqueName: \"kubernetes.io/projected/108e0e4f-9137-4e8c-aec6-032c1585852c-kube-api-access-jkhvm\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.607969 4712 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/108e0e4f-9137-4e8c-aec6-032c1585852c-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.607979 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.607991 4712 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.608001 4712 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/108e0e4f-9137-4e8c-aec6-032c1585852c-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.608009 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.608016 4712 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/108e0e4f-9137-4e8c-aec6-032c1585852c-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.704523 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.707659 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.712819 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.731622 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.811099 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2n9t\" (UniqueName: \"kubernetes.io/projected/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-kube-api-access-m2n9t\") pod \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\" (UID: \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.811243 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80b022b2-f7c3-489e-832a-116b9d4edbf7-operator-scripts\") pod \"80b022b2-f7c3-489e-832a-116b9d4edbf7\" (UID: \"80b022b2-f7c3-489e-832a-116b9d4edbf7\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.811323 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b036491-3b39-44db-b9c8-f1ab0fc6034e-operator-scripts\") pod \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\" (UID: \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.811439 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-operator-scripts\") pod \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\" (UID: \"1b0cb952-b4c3-4cb2-b420-72d735bf02f0\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.812771 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1b0cb952-b4c3-4cb2-b420-72d735bf02f0" (UID: "1b0cb952-b4c3-4cb2-b420-72d735bf02f0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.812994 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80b022b2-f7c3-489e-832a-116b9d4edbf7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "80b022b2-f7c3-489e-832a-116b9d4edbf7" (UID: "80b022b2-f7c3-489e-832a-116b9d4edbf7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.812989 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b036491-3b39-44db-b9c8-f1ab0fc6034e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6b036491-3b39-44db-b9c8-f1ab0fc6034e" (UID: "6b036491-3b39-44db-b9c8-f1ab0fc6034e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.813618 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld7w6\" (UniqueName: \"kubernetes.io/projected/6b036491-3b39-44db-b9c8-f1ab0fc6034e-kube-api-access-ld7w6\") pod \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\" (UID: \"6b036491-3b39-44db-b9c8-f1ab0fc6034e\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.813695 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pghsb\" (UniqueName: \"kubernetes.io/projected/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-kube-api-access-pghsb\") pod \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\" (UID: \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.813735 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-operator-scripts\") pod \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\" (UID: \"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.813767 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44j75\" (UniqueName: \"kubernetes.io/projected/80b022b2-f7c3-489e-832a-116b9d4edbf7-kube-api-access-44j75\") pod \"80b022b2-f7c3-489e-832a-116b9d4edbf7\" (UID: \"80b022b2-f7c3-489e-832a-116b9d4edbf7\") " Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.814436 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80b022b2-f7c3-489e-832a-116b9d4edbf7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.814468 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b036491-3b39-44db-b9c8-f1ab0fc6034e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.814475 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e7e2a723-54a1-42ac-8e8c-3ca125b6d01c" (UID: "e7e2a723-54a1-42ac-8e8c-3ca125b6d01c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.814483 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.817303 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-kube-api-access-m2n9t" (OuterVolumeSpecName: "kube-api-access-m2n9t") pod "1b0cb952-b4c3-4cb2-b420-72d735bf02f0" (UID: "1b0cb952-b4c3-4cb2-b420-72d735bf02f0"). InnerVolumeSpecName "kube-api-access-m2n9t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.819531 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b036491-3b39-44db-b9c8-f1ab0fc6034e-kube-api-access-ld7w6" (OuterVolumeSpecName: "kube-api-access-ld7w6") pod "6b036491-3b39-44db-b9c8-f1ab0fc6034e" (UID: "6b036491-3b39-44db-b9c8-f1ab0fc6034e"). InnerVolumeSpecName "kube-api-access-ld7w6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.819599 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b022b2-f7c3-489e-832a-116b9d4edbf7-kube-api-access-44j75" (OuterVolumeSpecName: "kube-api-access-44j75") pod "80b022b2-f7c3-489e-832a-116b9d4edbf7" (UID: "80b022b2-f7c3-489e-832a-116b9d4edbf7"). InnerVolumeSpecName "kube-api-access-44j75". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.819619 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-kube-api-access-pghsb" (OuterVolumeSpecName: "kube-api-access-pghsb") pod "e7e2a723-54a1-42ac-8e8c-3ca125b6d01c" (UID: "e7e2a723-54a1-42ac-8e8c-3ca125b6d01c"). InnerVolumeSpecName "kube-api-access-pghsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.916613 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld7w6\" (UniqueName: \"kubernetes.io/projected/6b036491-3b39-44db-b9c8-f1ab0fc6034e-kube-api-access-ld7w6\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.916649 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pghsb\" (UniqueName: \"kubernetes.io/projected/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-kube-api-access-pghsb\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.916659 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.916668 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44j75\" (UniqueName: \"kubernetes.io/projected/80b022b2-f7c3-489e-832a-116b9d4edbf7-kube-api-access-44j75\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:47 crc kubenswrapper[4712]: I0131 05:59:47.916680 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2n9t\" (UniqueName: \"kubernetes.io/projected/1b0cb952-b4c3-4cb2-b420-72d735bf02f0-kube-api-access-m2n9t\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.025762 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-8e51-account-create-update-psnbh" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.026467 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8e51-account-create-update-psnbh" event={"ID":"e7e2a723-54a1-42ac-8e8c-3ca125b6d01c","Type":"ContainerDied","Data":"5906ff1f03b675c863ec3294aeb6e6733bbabf5c8e07ec4729c1b1fc25ef5b44"} Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.026882 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5906ff1f03b675c863ec3294aeb6e6733bbabf5c8e07ec4729c1b1fc25ef5b44" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.031761 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b4ph9" event={"ID":"80b022b2-f7c3-489e-832a-116b9d4edbf7","Type":"ContainerDied","Data":"02bd809109c7d9ebf750e313d57fb335ef0ce54ce2b82dd8c0df94b17ba6e76f"} Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.031812 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02bd809109c7d9ebf750e313d57fb335ef0ce54ce2b82dd8c0df94b17ba6e76f" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.031835 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b4ph9" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.035907 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nkvdt" event={"ID":"1b0cb952-b4c3-4cb2-b420-72d735bf02f0","Type":"ContainerDied","Data":"55c66803b6256b330a27badc3bc24cd2328f733c069c2a00fdb024f37649d22c"} Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.035937 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55c66803b6256b330a27badc3bc24cd2328f733c069c2a00fdb024f37649d22c" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.036007 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nkvdt" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.041610 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8a1d-account-create-update-9mm2b" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.041570 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8a1d-account-create-update-9mm2b" event={"ID":"6b036491-3b39-44db-b9c8-f1ab0fc6034e","Type":"ContainerDied","Data":"8540c4c9c784588c78028f826c21016a7d39321375267783c1a80a4b6bbdc7ae"} Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.041821 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8540c4c9c784588c78028f826c21016a7d39321375267783c1a80a4b6bbdc7ae" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.044684 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-xnfzc" event={"ID":"108e0e4f-9137-4e8c-aec6-032c1585852c","Type":"ContainerDied","Data":"b5e77f8aa605c9cb597dbd051e8c8012967e335bd3c09eb4ca5c5b0cefbaeda2"} Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.044710 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5e77f8aa605c9cb597dbd051e8c8012967e335bd3c09eb4ca5c5b0cefbaeda2" Jan 31 05:59:48 crc kubenswrapper[4712]: I0131 05:59:48.044802 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-xnfzc" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.675674 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-xbh95" podUID="16fc3ee9-9ecc-45b7-8410-d9a6b2da5863" containerName="ovn-controller" probeResult="failure" output=< Jan 31 05:59:49 crc kubenswrapper[4712]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 31 05:59:49 crc kubenswrapper[4712]: > Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.692158 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.704059 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-xzbjj" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.941669 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-xbh95-config-phln6"] Jan 31 05:59:49 crc kubenswrapper[4712]: E0131 05:59:49.942049 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="108e0e4f-9137-4e8c-aec6-032c1585852c" containerName="swift-ring-rebalance" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942064 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="108e0e4f-9137-4e8c-aec6-032c1585852c" containerName="swift-ring-rebalance" Jan 31 05:59:49 crc kubenswrapper[4712]: E0131 05:59:49.942085 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7e2a723-54a1-42ac-8e8c-3ca125b6d01c" containerName="mariadb-account-create-update" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942091 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7e2a723-54a1-42ac-8e8c-3ca125b6d01c" containerName="mariadb-account-create-update" Jan 31 05:59:49 crc kubenswrapper[4712]: E0131 05:59:49.942102 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b0cb952-b4c3-4cb2-b420-72d735bf02f0" containerName="mariadb-database-create" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942109 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b0cb952-b4c3-4cb2-b420-72d735bf02f0" containerName="mariadb-database-create" Jan 31 05:59:49 crc kubenswrapper[4712]: E0131 05:59:49.942136 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b036491-3b39-44db-b9c8-f1ab0fc6034e" containerName="mariadb-account-create-update" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942144 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b036491-3b39-44db-b9c8-f1ab0fc6034e" containerName="mariadb-account-create-update" Jan 31 05:59:49 crc kubenswrapper[4712]: E0131 05:59:49.942155 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b022b2-f7c3-489e-832a-116b9d4edbf7" containerName="mariadb-database-create" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942205 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b022b2-f7c3-489e-832a-116b9d4edbf7" containerName="mariadb-database-create" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942382 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b036491-3b39-44db-b9c8-f1ab0fc6034e" containerName="mariadb-account-create-update" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942408 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="108e0e4f-9137-4e8c-aec6-032c1585852c" containerName="swift-ring-rebalance" Jan 31 05:59:49 
crc kubenswrapper[4712]: I0131 05:59:49.942415 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b0cb952-b4c3-4cb2-b420-72d735bf02f0" containerName="mariadb-database-create" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942428 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b022b2-f7c3-489e-832a-116b9d4edbf7" containerName="mariadb-database-create" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942435 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7e2a723-54a1-42ac-8e8c-3ca125b6d01c" containerName="mariadb-account-create-update" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.942994 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.945357 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 31 05:59:49 crc kubenswrapper[4712]: I0131 05:59:49.963916 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-xbh95-config-phln6"] Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.059760 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p767\" (UniqueName: \"kubernetes.io/projected/61eb882f-2dfd-42c1-897d-fecf20f598a6-kube-api-access-4p767\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.059808 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-log-ovn\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.059853 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-additional-scripts\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.059924 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.060024 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run-ovn\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.060102 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-scripts\") pod \"ovn-controller-xbh95-config-phln6\" (UID: 
\"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.161856 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-additional-scripts\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.161923 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.161984 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run-ovn\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.162422 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.162424 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run-ovn\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.162512 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-scripts\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.162632 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p767\" (UniqueName: \"kubernetes.io/projected/61eb882f-2dfd-42c1-897d-fecf20f598a6-kube-api-access-4p767\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.162660 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-log-ovn\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.162772 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-log-ovn\") pod \"ovn-controller-xbh95-config-phln6\" (UID: 
\"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.162828 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-additional-scripts\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.164689 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-scripts\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.182608 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p767\" (UniqueName: \"kubernetes.io/projected/61eb882f-2dfd-42c1-897d-fecf20f598a6-kube-api-access-4p767\") pod \"ovn-controller-xbh95-config-phln6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.263543 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.718793 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-xbh95-config-phln6"] Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.934530 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-ldrnk"] Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.935672 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.939007 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 31 05:59:50 crc kubenswrapper[4712]: I0131 05:59:50.944522 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-ldrnk"] Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.077374 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-xbh95-config-phln6" event={"ID":"61eb882f-2dfd-42c1-897d-fecf20f598a6","Type":"ContainerStarted","Data":"b8aaaf4d0565327ce9874a56ac3ae1dcd1b198245b277d283fada05a2671bbf1"} Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.080794 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-operator-scripts\") pod \"root-account-create-update-ldrnk\" (UID: \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\") " pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.081023 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtw9h\" (UniqueName: \"kubernetes.io/projected/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-kube-api-access-gtw9h\") pod \"root-account-create-update-ldrnk\" (UID: \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\") " pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.183437 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-operator-scripts\") pod \"root-account-create-update-ldrnk\" (UID: \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\") " pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.183586 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtw9h\" (UniqueName: \"kubernetes.io/projected/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-kube-api-access-gtw9h\") pod \"root-account-create-update-ldrnk\" (UID: \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\") " pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.184375 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-operator-scripts\") pod \"root-account-create-update-ldrnk\" (UID: \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\") " pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.211758 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtw9h\" (UniqueName: \"kubernetes.io/projected/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-kube-api-access-gtw9h\") pod \"root-account-create-update-ldrnk\" (UID: \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\") " pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.295966 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.392531 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.504424 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.788979 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-667z8"] Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.804397 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-667z8" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.840854 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-667z8"] Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.920772 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwthb\" (UniqueName: \"kubernetes.io/projected/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-kube-api-access-nwthb\") pod \"cinder-db-create-667z8\" (UID: \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\") " pod="openstack/cinder-db-create-667z8" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.920914 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-operator-scripts\") pod \"cinder-db-create-667z8\" (UID: \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\") " pod="openstack/cinder-db-create-667z8" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.930642 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8725-account-create-update-n2mlq"] Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.933977 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.941343 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 31 05:59:51 crc kubenswrapper[4712]: I0131 05:59:51.976010 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-ldrnk"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:51.993946 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8725-account-create-update-n2mlq"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.022062 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwhzn\" (UniqueName: \"kubernetes.io/projected/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-kube-api-access-mwhzn\") pod \"barbican-8725-account-create-update-n2mlq\" (UID: \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\") " pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.022130 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwthb\" (UniqueName: \"kubernetes.io/projected/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-kube-api-access-nwthb\") pod \"cinder-db-create-667z8\" (UID: \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\") " pod="openstack/cinder-db-create-667z8" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.022278 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-operator-scripts\") pod \"cinder-db-create-667z8\" (UID: \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\") " pod="openstack/cinder-db-create-667z8" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.022310 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-operator-scripts\") pod \"barbican-8725-account-create-update-n2mlq\" (UID: \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\") " pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.023013 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-operator-scripts\") pod \"cinder-db-create-667z8\" (UID: \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\") " pod="openstack/cinder-db-create-667z8" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.023097 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-72bxm"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.024349 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.043525 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-72bxm"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.050959 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwthb\" (UniqueName: \"kubernetes.io/projected/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-kube-api-access-nwthb\") pod \"cinder-db-create-667z8\" (UID: \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\") " pod="openstack/cinder-db-create-667z8" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.051493 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-246d-account-create-update-zm6nr"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.052789 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.056838 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.065088 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-246d-account-create-update-zm6nr"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.104268 4712 generic.go:334] "Generic (PLEG): container finished" podID="61eb882f-2dfd-42c1-897d-fecf20f598a6" containerID="8fd86bc66e5b9d680d2db6f26ab4ec04eb24a9522cfe0d2e09698094fc1a9118" exitCode=0 Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.104653 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-xbh95-config-phln6" event={"ID":"61eb882f-2dfd-42c1-897d-fecf20f598a6","Type":"ContainerDied","Data":"8fd86bc66e5b9d680d2db6f26ab4ec04eb24a9522cfe0d2e09698094fc1a9118"} Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.107459 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-ldrnk" event={"ID":"907ab3c5-1c04-422a-ac94-a6179bd6e9ac","Type":"ContainerStarted","Data":"4617610c13208452cd5a4903cce93437a689bf5a5f9c0438e310fe288b20086a"} Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.124548 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcjnq\" (UniqueName: \"kubernetes.io/projected/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-kube-api-access-kcjnq\") pod \"cinder-246d-account-create-update-zm6nr\" (UID: \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\") " pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.124608 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-operator-scripts\") pod \"barbican-8725-account-create-update-n2mlq\" (UID: \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\") " pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.125390 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwhzn\" (UniqueName: \"kubernetes.io/projected/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-kube-api-access-mwhzn\") pod \"barbican-8725-account-create-update-n2mlq\" (UID: \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\") " pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 
05:59:52.125470 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsqkf\" (UniqueName: \"kubernetes.io/projected/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-kube-api-access-fsqkf\") pod \"barbican-db-create-72bxm\" (UID: \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\") " pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.125583 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-operator-scripts\") pod \"cinder-246d-account-create-update-zm6nr\" (UID: \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\") " pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.125621 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-operator-scripts\") pod \"barbican-db-create-72bxm\" (UID: \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\") " pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.127554 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-operator-scripts\") pod \"barbican-8725-account-create-update-n2mlq\" (UID: \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\") " pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.133591 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-19bb-account-create-update-stcpz"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.136848 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.139894 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.148308 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-19bb-account-create-update-stcpz"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.159731 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwhzn\" (UniqueName: \"kubernetes.io/projected/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-kube-api-access-mwhzn\") pod \"barbican-8725-account-create-update-n2mlq\" (UID: \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\") " pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.169955 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-667z8" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.192645 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-g4wb5"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.193882 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.206205 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-g4wb5"] Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.228431 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqlmw\" (UniqueName: \"kubernetes.io/projected/2998b9bb-f92a-427c-824a-80e66ff0643b-kube-api-access-fqlmw\") pod \"neutron-19bb-account-create-update-stcpz\" (UID: \"2998b9bb-f92a-427c-824a-80e66ff0643b\") " pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.228704 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-operator-scripts\") pod \"cinder-246d-account-create-update-zm6nr\" (UID: \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\") " pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.228825 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-operator-scripts\") pod \"barbican-db-create-72bxm\" (UID: \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\") " pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.228947 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcjnq\" (UniqueName: \"kubernetes.io/projected/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-kube-api-access-kcjnq\") pod \"cinder-246d-account-create-update-zm6nr\" (UID: \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\") " pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.229077 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2998b9bb-f92a-427c-824a-80e66ff0643b-operator-scripts\") pod \"neutron-19bb-account-create-update-stcpz\" (UID: \"2998b9bb-f92a-427c-824a-80e66ff0643b\") " pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.229269 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsqkf\" (UniqueName: \"kubernetes.io/projected/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-kube-api-access-fsqkf\") pod \"barbican-db-create-72bxm\" (UID: \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\") " pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.230560 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-operator-scripts\") pod \"cinder-246d-account-create-update-zm6nr\" (UID: \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\") " pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.231298 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-operator-scripts\") pod \"barbican-db-create-72bxm\" (UID: \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\") " pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:52 crc kubenswrapper[4712]: 
I0131 05:59:52.260762 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsqkf\" (UniqueName: \"kubernetes.io/projected/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-kube-api-access-fsqkf\") pod \"barbican-db-create-72bxm\" (UID: \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\") " pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.264874 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcjnq\" (UniqueName: \"kubernetes.io/projected/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-kube-api-access-kcjnq\") pod \"cinder-246d-account-create-update-zm6nr\" (UID: \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\") " pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.308879 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.330384 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t9v9\" (UniqueName: \"kubernetes.io/projected/4a28a1ff-7c14-412e-9a7f-5a0018859762-kube-api-access-7t9v9\") pod \"neutron-db-create-g4wb5\" (UID: \"4a28a1ff-7c14-412e-9a7f-5a0018859762\") " pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.330504 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2998b9bb-f92a-427c-824a-80e66ff0643b-operator-scripts\") pod \"neutron-19bb-account-create-update-stcpz\" (UID: \"2998b9bb-f92a-427c-824a-80e66ff0643b\") " pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.330620 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqlmw\" (UniqueName: \"kubernetes.io/projected/2998b9bb-f92a-427c-824a-80e66ff0643b-kube-api-access-fqlmw\") pod \"neutron-19bb-account-create-update-stcpz\" (UID: \"2998b9bb-f92a-427c-824a-80e66ff0643b\") " pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.330668 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a28a1ff-7c14-412e-9a7f-5a0018859762-operator-scripts\") pod \"neutron-db-create-g4wb5\" (UID: \"4a28a1ff-7c14-412e-9a7f-5a0018859762\") " pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.331333 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2998b9bb-f92a-427c-824a-80e66ff0643b-operator-scripts\") pod \"neutron-19bb-account-create-update-stcpz\" (UID: \"2998b9bb-f92a-427c-824a-80e66ff0643b\") " pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.350207 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.351804 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqlmw\" (UniqueName: \"kubernetes.io/projected/2998b9bb-f92a-427c-824a-80e66ff0643b-kube-api-access-fqlmw\") pod \"neutron-19bb-account-create-update-stcpz\" (UID: \"2998b9bb-f92a-427c-824a-80e66ff0643b\") " pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.388080 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.443554 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a28a1ff-7c14-412e-9a7f-5a0018859762-operator-scripts\") pod \"neutron-db-create-g4wb5\" (UID: \"4a28a1ff-7c14-412e-9a7f-5a0018859762\") " pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.443637 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t9v9\" (UniqueName: \"kubernetes.io/projected/4a28a1ff-7c14-412e-9a7f-5a0018859762-kube-api-access-7t9v9\") pod \"neutron-db-create-g4wb5\" (UID: \"4a28a1ff-7c14-412e-9a7f-5a0018859762\") " pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.444527 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a28a1ff-7c14-412e-9a7f-5a0018859762-operator-scripts\") pod \"neutron-db-create-g4wb5\" (UID: \"4a28a1ff-7c14-412e-9a7f-5a0018859762\") " pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.465088 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.475995 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t9v9\" (UniqueName: \"kubernetes.io/projected/4a28a1ff-7c14-412e-9a7f-5a0018859762-kube-api-access-7t9v9\") pod \"neutron-db-create-g4wb5\" (UID: \"4a28a1ff-7c14-412e-9a7f-5a0018859762\") " pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.506969 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.645111 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-667z8"] Jan 31 05:59:52 crc kubenswrapper[4712]: W0131 05:59:52.646221 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a2b25a7_db91_4e29_9d5e_6acaa45cc200.slice/crio-c1ca32fd0c8a8de75f458e20cdb442e64e67e42a2772c9c67337008e0a7ac3eb WatchSource:0}: Error finding container c1ca32fd0c8a8de75f458e20cdb442e64e67e42a2772c9c67337008e0a7ac3eb: Status 404 returned error can't find the container with id c1ca32fd0c8a8de75f458e20cdb442e64e67e42a2772c9c67337008e0a7ac3eb Jan 31 05:59:52 crc kubenswrapper[4712]: I0131 05:59:52.997772 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-72bxm"] Jan 31 05:59:53 crc kubenswrapper[4712]: W0131 05:59:53.006334 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24b09fe5_c6dc_4a74_97ce_e4c2295cf6bc.slice/crio-08138d7505b056cef3d3a920367e6d3c826c81eea6e580f10d9baaa4ebfeb667 WatchSource:0}: Error finding container 08138d7505b056cef3d3a920367e6d3c826c81eea6e580f10d9baaa4ebfeb667: Status 404 returned error can't find the container with id 08138d7505b056cef3d3a920367e6d3c826c81eea6e580f10d9baaa4ebfeb667 Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.125906 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8725-account-create-update-n2mlq"] Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.145131 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-246d-account-create-update-zm6nr"] Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.155036 4712 generic.go:334] "Generic (PLEG): container finished" podID="0a2b25a7-db91-4e29-9d5e-6acaa45cc200" containerID="bec24cfcc57afb714b2a7efdb30b61f60854939b8395c6689f8807c799c1755f" exitCode=0 Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.155104 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-667z8" event={"ID":"0a2b25a7-db91-4e29-9d5e-6acaa45cc200","Type":"ContainerDied","Data":"bec24cfcc57afb714b2a7efdb30b61f60854939b8395c6689f8807c799c1755f"} Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.155285 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-667z8" event={"ID":"0a2b25a7-db91-4e29-9d5e-6acaa45cc200","Type":"ContainerStarted","Data":"c1ca32fd0c8a8de75f458e20cdb442e64e67e42a2772c9c67337008e0a7ac3eb"} Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.158258 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-72bxm" event={"ID":"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc","Type":"ContainerStarted","Data":"08138d7505b056cef3d3a920367e6d3c826c81eea6e580f10d9baaa4ebfeb667"} Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.160016 4712 generic.go:334] "Generic (PLEG): container finished" podID="907ab3c5-1c04-422a-ac94-a6179bd6e9ac" containerID="a47ee06fc8fead4f4e098e32c959ea6ba4e9d12300387f6c2b83372e66f2fa61" exitCode=0 Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.160159 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-ldrnk" 
event={"ID":"907ab3c5-1c04-422a-ac94-a6179bd6e9ac","Type":"ContainerDied","Data":"a47ee06fc8fead4f4e098e32c959ea6ba4e9d12300387f6c2b83372e66f2fa61"} Jan 31 05:59:53 crc kubenswrapper[4712]: W0131 05:59:53.164824 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdbe92ef_e3d9_4528_bc92_ba7309daafe4.slice/crio-8c6940122894cb7d74236d6c82510ade1cf4ead79a8ace7791488e068a8de6ae WatchSource:0}: Error finding container 8c6940122894cb7d74236d6c82510ade1cf4ead79a8ace7791488e068a8de6ae: Status 404 returned error can't find the container with id 8c6940122894cb7d74236d6c82510ade1cf4ead79a8ace7791488e068a8de6ae Jan 31 05:59:53 crc kubenswrapper[4712]: W0131 05:59:53.166068 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47c8e6c2_dd63_4d15_8c59_f1aa014bdebe.slice/crio-616366a3f98d9858e5613d6d021ad9f736f7c1493a2d849e7a0446e92de13e79 WatchSource:0}: Error finding container 616366a3f98d9858e5613d6d021ad9f736f7c1493a2d849e7a0446e92de13e79: Status 404 returned error can't find the container with id 616366a3f98d9858e5613d6d021ad9f736f7c1493a2d849e7a0446e92de13e79 Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.262954 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-g4wb5"] Jan 31 05:59:53 crc kubenswrapper[4712]: W0131 05:59:53.277815 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a28a1ff_7c14_412e_9a7f_5a0018859762.slice/crio-cf9448f3e7f6d6d01f53c8000dc5729f05b077971baac90c5bf448ac5ecc59c9 WatchSource:0}: Error finding container cf9448f3e7f6d6d01f53c8000dc5729f05b077971baac90c5bf448ac5ecc59c9: Status 404 returned error can't find the container with id cf9448f3e7f6d6d01f53c8000dc5729f05b077971baac90c5bf448ac5ecc59c9 Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.279041 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-19bb-account-create-update-stcpz"] Jan 31 05:59:53 crc kubenswrapper[4712]: W0131 05:59:53.290695 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2998b9bb_f92a_427c_824a_80e66ff0643b.slice/crio-c3093c170d4c5014c1ee8469ffb703f35358732c9234258e3274873df851b603 WatchSource:0}: Error finding container c3093c170d4c5014c1ee8469ffb703f35358732c9234258e3274873df851b603: Status 404 returned error can't find the container with id c3093c170d4c5014c1ee8469ffb703f35358732c9234258e3274873df851b603 Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.529006 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.678935 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run-ovn\") pod \"61eb882f-2dfd-42c1-897d-fecf20f598a6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.678998 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-scripts\") pod \"61eb882f-2dfd-42c1-897d-fecf20f598a6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.679050 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "61eb882f-2dfd-42c1-897d-fecf20f598a6" (UID: "61eb882f-2dfd-42c1-897d-fecf20f598a6"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.679207 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-additional-scripts\") pod \"61eb882f-2dfd-42c1-897d-fecf20f598a6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.679240 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4p767\" (UniqueName: \"kubernetes.io/projected/61eb882f-2dfd-42c1-897d-fecf20f598a6-kube-api-access-4p767\") pod \"61eb882f-2dfd-42c1-897d-fecf20f598a6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.679284 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-log-ovn\") pod \"61eb882f-2dfd-42c1-897d-fecf20f598a6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.679383 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run\") pod \"61eb882f-2dfd-42c1-897d-fecf20f598a6\" (UID: \"61eb882f-2dfd-42c1-897d-fecf20f598a6\") " Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.679412 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "61eb882f-2dfd-42c1-897d-fecf20f598a6" (UID: "61eb882f-2dfd-42c1-897d-fecf20f598a6"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.679520 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run" (OuterVolumeSpecName: "var-run") pod "61eb882f-2dfd-42c1-897d-fecf20f598a6" (UID: "61eb882f-2dfd-42c1-897d-fecf20f598a6"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.680014 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "61eb882f-2dfd-42c1-897d-fecf20f598a6" (UID: "61eb882f-2dfd-42c1-897d-fecf20f598a6"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.680198 4712 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.680224 4712 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.680237 4712 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.680248 4712 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/61eb882f-2dfd-42c1-897d-fecf20f598a6-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.680268 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-scripts" (OuterVolumeSpecName: "scripts") pod "61eb882f-2dfd-42c1-897d-fecf20f598a6" (UID: "61eb882f-2dfd-42c1-897d-fecf20f598a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.689503 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61eb882f-2dfd-42c1-897d-fecf20f598a6-kube-api-access-4p767" (OuterVolumeSpecName: "kube-api-access-4p767") pod "61eb882f-2dfd-42c1-897d-fecf20f598a6" (UID: "61eb882f-2dfd-42c1-897d-fecf20f598a6"). InnerVolumeSpecName "kube-api-access-4p767". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.782389 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4p767\" (UniqueName: \"kubernetes.io/projected/61eb882f-2dfd-42c1-897d-fecf20f598a6-kube-api-access-4p767\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:53 crc kubenswrapper[4712]: I0131 05:59:53.782434 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eb882f-2dfd-42c1-897d-fecf20f598a6-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.186469 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-xbh95-config-phln6" event={"ID":"61eb882f-2dfd-42c1-897d-fecf20f598a6","Type":"ContainerDied","Data":"b8aaaf4d0565327ce9874a56ac3ae1dcd1b198245b277d283fada05a2671bbf1"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.186871 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8aaaf4d0565327ce9874a56ac3ae1dcd1b198245b277d283fada05a2671bbf1" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.186951 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-xbh95-config-phln6" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.204886 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-246d-account-create-update-zm6nr" event={"ID":"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe","Type":"ContainerStarted","Data":"956833f5d936e29243b8676d025075d9b3ab4ce0a134b2649a294772c9ab1045"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.204937 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-246d-account-create-update-zm6nr" event={"ID":"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe","Type":"ContainerStarted","Data":"616366a3f98d9858e5613d6d021ad9f736f7c1493a2d849e7a0446e92de13e79"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.207326 4712 generic.go:334] "Generic (PLEG): container finished" podID="24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc" containerID="3aa786383ad64c6d2b633d4ad53991df3fef3f6c8ee1fdb8f001b69aaf4f98d6" exitCode=0 Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.207378 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-72bxm" event={"ID":"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc","Type":"ContainerDied","Data":"3aa786383ad64c6d2b633d4ad53991df3fef3f6c8ee1fdb8f001b69aaf4f98d6"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.214481 4712 generic.go:334] "Generic (PLEG): container finished" podID="2998b9bb-f92a-427c-824a-80e66ff0643b" containerID="0e072e19229f70590e4d2e4a27fb2530ab526dd4f489af075125ce90f3ce3ada" exitCode=0 Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.214580 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-19bb-account-create-update-stcpz" event={"ID":"2998b9bb-f92a-427c-824a-80e66ff0643b","Type":"ContainerDied","Data":"0e072e19229f70590e4d2e4a27fb2530ab526dd4f489af075125ce90f3ce3ada"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.214610 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-19bb-account-create-update-stcpz" event={"ID":"2998b9bb-f92a-427c-824a-80e66ff0643b","Type":"ContainerStarted","Data":"c3093c170d4c5014c1ee8469ffb703f35358732c9234258e3274873df851b603"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.222585 4712 generic.go:334] "Generic (PLEG): 
container finished" podID="fdbe92ef-e3d9-4528-bc92-ba7309daafe4" containerID="bd8d3151c6577f478585f23abf3ae1d5f9cd8a4b55e2e9c3708211ff631a0cc9" exitCode=0 Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.222657 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8725-account-create-update-n2mlq" event={"ID":"fdbe92ef-e3d9-4528-bc92-ba7309daafe4","Type":"ContainerDied","Data":"bd8d3151c6577f478585f23abf3ae1d5f9cd8a4b55e2e9c3708211ff631a0cc9"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.222681 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8725-account-create-update-n2mlq" event={"ID":"fdbe92ef-e3d9-4528-bc92-ba7309daafe4","Type":"ContainerStarted","Data":"8c6940122894cb7d74236d6c82510ade1cf4ead79a8ace7791488e068a8de6ae"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.225819 4712 generic.go:334] "Generic (PLEG): container finished" podID="4a28a1ff-7c14-412e-9a7f-5a0018859762" containerID="af1f623d801e4c4eb1203c9af66039086856a7a3ab7159460911bfae59536649" exitCode=0 Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.226137 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-g4wb5" event={"ID":"4a28a1ff-7c14-412e-9a7f-5a0018859762","Type":"ContainerDied","Data":"af1f623d801e4c4eb1203c9af66039086856a7a3ab7159460911bfae59536649"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.226209 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-g4wb5" event={"ID":"4a28a1ff-7c14-412e-9a7f-5a0018859762","Type":"ContainerStarted","Data":"cf9448f3e7f6d6d01f53c8000dc5729f05b077971baac90c5bf448ac5ecc59c9"} Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.266026 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-246d-account-create-update-zm6nr" podStartSLOduration=3.2659925 podStartE2EDuration="3.2659925s" podCreationTimestamp="2026-01-31 05:59:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:59:54.257355477 +0000 UTC m=+1260.351237318" watchObservedRunningTime="2026-01-31 05:59:54.2659925 +0000 UTC m=+1260.359874341" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.579547 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-mngsk"] Jan 31 05:59:54 crc kubenswrapper[4712]: E0131 05:59:54.580206 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61eb882f-2dfd-42c1-897d-fecf20f598a6" containerName="ovn-config" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.580219 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="61eb882f-2dfd-42c1-897d-fecf20f598a6" containerName="ovn-config" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.580400 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="61eb882f-2dfd-42c1-897d-fecf20f598a6" containerName="ovn-config" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.580977 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.613393 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.613658 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.613681 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kdwj2" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.613955 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.687246 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-xbh95" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.709137 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.721300 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk5r6\" (UniqueName: \"kubernetes.io/projected/1df94e08-77d8-4734-9985-81bc69a91cf2-kube-api-access-xk5r6\") pod \"keystone-db-sync-mngsk\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.721572 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-combined-ca-bundle\") pod \"keystone-db-sync-mngsk\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.721658 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-config-data\") pod \"keystone-db-sync-mngsk\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.756480 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-mngsk"] Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.774925 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-xbh95-config-phln6"] Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.783780 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-xbh95-config-phln6"] Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.823218 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-operator-scripts\") pod \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\" (UID: \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\") " Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.823371 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtw9h\" (UniqueName: \"kubernetes.io/projected/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-kube-api-access-gtw9h\") pod \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\" (UID: \"907ab3c5-1c04-422a-ac94-a6179bd6e9ac\") " Jan 31 05:59:54 crc kubenswrapper[4712]: 
I0131 05:59:54.823634 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk5r6\" (UniqueName: \"kubernetes.io/projected/1df94e08-77d8-4734-9985-81bc69a91cf2-kube-api-access-xk5r6\") pod \"keystone-db-sync-mngsk\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.824234 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-combined-ca-bundle\") pod \"keystone-db-sync-mngsk\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.824442 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-config-data\") pod \"keystone-db-sync-mngsk\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.824512 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "907ab3c5-1c04-422a-ac94-a6179bd6e9ac" (UID: "907ab3c5-1c04-422a-ac94-a6179bd6e9ac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.830798 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-combined-ca-bundle\") pod \"keystone-db-sync-mngsk\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.833429 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-kube-api-access-gtw9h" (OuterVolumeSpecName: "kube-api-access-gtw9h") pod "907ab3c5-1c04-422a-ac94-a6179bd6e9ac" (UID: "907ab3c5-1c04-422a-ac94-a6179bd6e9ac"). InnerVolumeSpecName "kube-api-access-gtw9h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.833568 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-config-data\") pod \"keystone-db-sync-mngsk\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.847295 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-xbh95-config-sld7x"] Jan 31 05:59:54 crc kubenswrapper[4712]: E0131 05:59:54.847711 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="907ab3c5-1c04-422a-ac94-a6179bd6e9ac" containerName="mariadb-account-create-update" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.847729 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="907ab3c5-1c04-422a-ac94-a6179bd6e9ac" containerName="mariadb-account-create-update" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.847934 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="907ab3c5-1c04-422a-ac94-a6179bd6e9ac" containerName="mariadb-account-create-update" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.848266 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk5r6\" (UniqueName: \"kubernetes.io/projected/1df94e08-77d8-4734-9985-81bc69a91cf2-kube-api-access-xk5r6\") pod \"keystone-db-sync-mngsk\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.848592 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.852566 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.861860 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-xbh95-config-sld7x"] Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.865803 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-667z8" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.930304 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-scripts\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.930404 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-additional-scripts\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.930434 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run-ovn\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.930511 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvkjc\" (UniqueName: \"kubernetes.io/projected/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-kube-api-access-hvkjc\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.930549 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.930587 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-log-ovn\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.930645 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:54 crc kubenswrapper[4712]: I0131 05:59:54.930661 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtw9h\" (UniqueName: \"kubernetes.io/projected/907ab3c5-1c04-422a-ac94-a6179bd6e9ac-kube-api-access-gtw9h\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.003074 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-mngsk" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.031820 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwthb\" (UniqueName: \"kubernetes.io/projected/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-kube-api-access-nwthb\") pod \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\" (UID: \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\") " Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.031981 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-operator-scripts\") pod \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\" (UID: \"0a2b25a7-db91-4e29-9d5e-6acaa45cc200\") " Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.032324 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-scripts\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.032378 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-additional-scripts\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.032402 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run-ovn\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.032466 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvkjc\" (UniqueName: \"kubernetes.io/projected/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-kube-api-access-hvkjc\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.032505 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.032552 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-log-ovn\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.033898 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run-ovn\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 
31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.034493 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-additional-scripts\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.034623 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-log-ovn\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.036191 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-kube-api-access-nwthb" (OuterVolumeSpecName: "kube-api-access-nwthb") pod "0a2b25a7-db91-4e29-9d5e-6acaa45cc200" (UID: "0a2b25a7-db91-4e29-9d5e-6acaa45cc200"). InnerVolumeSpecName "kube-api-access-nwthb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.036608 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-scripts\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.037139 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0a2b25a7-db91-4e29-9d5e-6acaa45cc200" (UID: "0a2b25a7-db91-4e29-9d5e-6acaa45cc200"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.037269 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.049889 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvkjc\" (UniqueName: \"kubernetes.io/projected/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-kube-api-access-hvkjc\") pod \"ovn-controller-xbh95-config-sld7x\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") " pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.135240 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.135270 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwthb\" (UniqueName: \"kubernetes.io/projected/0a2b25a7-db91-4e29-9d5e-6acaa45cc200-kube-api-access-nwthb\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.189685 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-xbh95-config-sld7x" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.256614 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-667z8" event={"ID":"0a2b25a7-db91-4e29-9d5e-6acaa45cc200","Type":"ContainerDied","Data":"c1ca32fd0c8a8de75f458e20cdb442e64e67e42a2772c9c67337008e0a7ac3eb"} Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.256655 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1ca32fd0c8a8de75f458e20cdb442e64e67e42a2772c9c67337008e0a7ac3eb" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.256729 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-667z8" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.275887 4712 generic.go:334] "Generic (PLEG): container finished" podID="47c8e6c2-dd63-4d15-8c59-f1aa014bdebe" containerID="956833f5d936e29243b8676d025075d9b3ab4ce0a134b2649a294772c9ab1045" exitCode=0 Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.276013 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-246d-account-create-update-zm6nr" event={"ID":"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe","Type":"ContainerDied","Data":"956833f5d936e29243b8676d025075d9b3ab4ce0a134b2649a294772c9ab1045"} Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.278435 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-ldrnk" event={"ID":"907ab3c5-1c04-422a-ac94-a6179bd6e9ac","Type":"ContainerDied","Data":"4617610c13208452cd5a4903cce93437a689bf5a5f9c0438e310fe288b20086a"} Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.278487 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4617610c13208452cd5a4903cce93437a689bf5a5f9c0438e310fe288b20086a" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.278576 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-ldrnk" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.304604 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-mngsk"] Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.866248 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-xbh95-config-sld7x"] Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.947436 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.956658 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.967976 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:55 crc kubenswrapper[4712]: I0131 05:59:55.974559 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.052438 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2998b9bb-f92a-427c-824a-80e66ff0643b-operator-scripts\") pod \"2998b9bb-f92a-427c-824a-80e66ff0643b\" (UID: \"2998b9bb-f92a-427c-824a-80e66ff0643b\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.052799 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a28a1ff-7c14-412e-9a7f-5a0018859762-operator-scripts\") pod \"4a28a1ff-7c14-412e-9a7f-5a0018859762\" (UID: \"4a28a1ff-7c14-412e-9a7f-5a0018859762\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.052949 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwhzn\" (UniqueName: \"kubernetes.io/projected/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-kube-api-access-mwhzn\") pod \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\" (UID: \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.053055 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqlmw\" (UniqueName: \"kubernetes.io/projected/2998b9bb-f92a-427c-824a-80e66ff0643b-kube-api-access-fqlmw\") pod \"2998b9bb-f92a-427c-824a-80e66ff0643b\" (UID: \"2998b9bb-f92a-427c-824a-80e66ff0643b\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.053129 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-operator-scripts\") pod \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\" (UID: \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.053311 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t9v9\" (UniqueName: \"kubernetes.io/projected/4a28a1ff-7c14-412e-9a7f-5a0018859762-kube-api-access-7t9v9\") pod \"4a28a1ff-7c14-412e-9a7f-5a0018859762\" (UID: \"4a28a1ff-7c14-412e-9a7f-5a0018859762\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.053453 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-operator-scripts\") pod \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\" (UID: \"fdbe92ef-e3d9-4528-bc92-ba7309daafe4\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.053573 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsqkf\" (UniqueName: \"kubernetes.io/projected/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-kube-api-access-fsqkf\") pod \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\" (UID: \"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.054142 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a28a1ff-7c14-412e-9a7f-5a0018859762-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a28a1ff-7c14-412e-9a7f-5a0018859762" (UID: "4a28a1ff-7c14-412e-9a7f-5a0018859762"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.054283 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc" (UID: "24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.054284 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fdbe92ef-e3d9-4528-bc92-ba7309daafe4" (UID: "fdbe92ef-e3d9-4528-bc92-ba7309daafe4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.054536 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2998b9bb-f92a-427c-824a-80e66ff0643b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2998b9bb-f92a-427c-824a-80e66ff0643b" (UID: "2998b9bb-f92a-427c-824a-80e66ff0643b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.054859 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.054941 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2998b9bb-f92a-427c-824a-80e66ff0643b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.055003 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a28a1ff-7c14-412e-9a7f-5a0018859762-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.055082 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.059414 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-kube-api-access-mwhzn" (OuterVolumeSpecName: "kube-api-access-mwhzn") pod "fdbe92ef-e3d9-4528-bc92-ba7309daafe4" (UID: "fdbe92ef-e3d9-4528-bc92-ba7309daafe4"). InnerVolumeSpecName "kube-api-access-mwhzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.059459 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2998b9bb-f92a-427c-824a-80e66ff0643b-kube-api-access-fqlmw" (OuterVolumeSpecName: "kube-api-access-fqlmw") pod "2998b9bb-f92a-427c-824a-80e66ff0643b" (UID: "2998b9bb-f92a-427c-824a-80e66ff0643b"). InnerVolumeSpecName "kube-api-access-fqlmw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.060896 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a28a1ff-7c14-412e-9a7f-5a0018859762-kube-api-access-7t9v9" (OuterVolumeSpecName: "kube-api-access-7t9v9") pod "4a28a1ff-7c14-412e-9a7f-5a0018859762" (UID: "4a28a1ff-7c14-412e-9a7f-5a0018859762"). InnerVolumeSpecName "kube-api-access-7t9v9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.060989 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-kube-api-access-fsqkf" (OuterVolumeSpecName: "kube-api-access-fsqkf") pod "24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc" (UID: "24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc"). InnerVolumeSpecName "kube-api-access-fsqkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.157495 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwhzn\" (UniqueName: \"kubernetes.io/projected/fdbe92ef-e3d9-4528-bc92-ba7309daafe4-kube-api-access-mwhzn\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.157736 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqlmw\" (UniqueName: \"kubernetes.io/projected/2998b9bb-f92a-427c-824a-80e66ff0643b-kube-api-access-fqlmw\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.157896 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t9v9\" (UniqueName: \"kubernetes.io/projected/4a28a1ff-7c14-412e-9a7f-5a0018859762-kube-api-access-7t9v9\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.158051 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsqkf\" (UniqueName: \"kubernetes.io/projected/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc-kube-api-access-fsqkf\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.288902 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-g4wb5" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.289364 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-g4wb5" event={"ID":"4a28a1ff-7c14-412e-9a7f-5a0018859762","Type":"ContainerDied","Data":"cf9448f3e7f6d6d01f53c8000dc5729f05b077971baac90c5bf448ac5ecc59c9"} Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.289436 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf9448f3e7f6d6d01f53c8000dc5729f05b077971baac90c5bf448ac5ecc59c9" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.291233 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-72bxm" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.291253 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-72bxm" event={"ID":"24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc","Type":"ContainerDied","Data":"08138d7505b056cef3d3a920367e6d3c826c81eea6e580f10d9baaa4ebfeb667"} Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.291316 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08138d7505b056cef3d3a920367e6d3c826c81eea6e580f10d9baaa4ebfeb667" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.292827 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-19bb-account-create-update-stcpz" event={"ID":"2998b9bb-f92a-427c-824a-80e66ff0643b","Type":"ContainerDied","Data":"c3093c170d4c5014c1ee8469ffb703f35358732c9234258e3274873df851b603"} Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.292890 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3093c170d4c5014c1ee8469ffb703f35358732c9234258e3274873df851b603" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.292849 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-19bb-account-create-update-stcpz" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.294468 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8725-account-create-update-n2mlq" event={"ID":"fdbe92ef-e3d9-4528-bc92-ba7309daafe4","Type":"ContainerDied","Data":"8c6940122894cb7d74236d6c82510ade1cf4ead79a8ace7791488e068a8de6ae"} Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.294498 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8725-account-create-update-n2mlq" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.294501 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c6940122894cb7d74236d6c82510ade1cf4ead79a8ace7791488e068a8de6ae" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.300574 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mngsk" event={"ID":"1df94e08-77d8-4734-9985-81bc69a91cf2","Type":"ContainerStarted","Data":"a15efc770c5d46539c555fea3bfca66aab6a4001f088b0c652e619c4e77edbd8"} Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.304655 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-xbh95-config-sld7x" event={"ID":"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec","Type":"ContainerStarted","Data":"15e1ec8fd5f53f590e86a9d72b74186a3bfd6ef9a4cbf179b935574ef1602022"} Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.304708 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-xbh95-config-sld7x" event={"ID":"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec","Type":"ContainerStarted","Data":"91410343d3b216a24dece306d5dcf9236b0aa9225978485a363e2a2f09c345b0"} Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.328262 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-xbh95-config-sld7x" podStartSLOduration=2.328238888 podStartE2EDuration="2.328238888s" podCreationTimestamp="2026-01-31 05:59:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 05:59:56.325319566 +0000 UTC m=+1262.419201417" watchObservedRunningTime="2026-01-31 05:59:56.328238888 +0000 UTC m=+1262.422120729" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.546891 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61eb882f-2dfd-42c1-897d-fecf20f598a6" path="/var/lib/kubelet/pods/61eb882f-2dfd-42c1-897d-fecf20f598a6/volumes" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.584471 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.669645 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-operator-scripts\") pod \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\" (UID: \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.669692 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcjnq\" (UniqueName: \"kubernetes.io/projected/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-kube-api-access-kcjnq\") pod \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\" (UID: \"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe\") " Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.670531 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "47c8e6c2-dd63-4d15-8c59-f1aa014bdebe" (UID: "47c8e6c2-dd63-4d15-8c59-f1aa014bdebe"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.674434 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-kube-api-access-kcjnq" (OuterVolumeSpecName: "kube-api-access-kcjnq") pod "47c8e6c2-dd63-4d15-8c59-f1aa014bdebe" (UID: "47c8e6c2-dd63-4d15-8c59-f1aa014bdebe"). InnerVolumeSpecName "kube-api-access-kcjnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.772571 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:56 crc kubenswrapper[4712]: I0131 05:59:56.772610 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcjnq\" (UniqueName: \"kubernetes.io/projected/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe-kube-api-access-kcjnq\") on node \"crc\" DevicePath \"\"" Jan 31 05:59:57 crc kubenswrapper[4712]: I0131 05:59:57.315566 4712 generic.go:334] "Generic (PLEG): container finished" podID="9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" containerID="15e1ec8fd5f53f590e86a9d72b74186a3bfd6ef9a4cbf179b935574ef1602022" exitCode=0 Jan 31 05:59:57 crc kubenswrapper[4712]: I0131 05:59:57.315666 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-xbh95-config-sld7x" event={"ID":"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec","Type":"ContainerDied","Data":"15e1ec8fd5f53f590e86a9d72b74186a3bfd6ef9a4cbf179b935574ef1602022"} Jan 31 05:59:57 crc kubenswrapper[4712]: I0131 05:59:57.318986 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-246d-account-create-update-zm6nr" event={"ID":"47c8e6c2-dd63-4d15-8c59-f1aa014bdebe","Type":"ContainerDied","Data":"616366a3f98d9858e5613d6d021ad9f736f7c1493a2d849e7a0446e92de13e79"} Jan 31 05:59:57 crc kubenswrapper[4712]: I0131 05:59:57.319029 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="616366a3f98d9858e5613d6d021ad9f736f7c1493a2d849e7a0446e92de13e79" Jan 31 05:59:57 crc kubenswrapper[4712]: I0131 05:59:57.319062 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-246d-account-create-update-zm6nr" Jan 31 05:59:59 crc kubenswrapper[4712]: I0131 05:59:59.122044 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:59 crc kubenswrapper[4712]: I0131 05:59:59.131265 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/526325aa-f517-45ab-b0d3-b7285ef8db7b-etc-swift\") pod \"swift-storage-0\" (UID: \"526325aa-f517-45ab-b0d3-b7285ef8db7b\") " pod="openstack/swift-storage-0" Jan 31 05:59:59 crc kubenswrapper[4712]: I0131 05:59:59.229360 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.138430 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"] Jan 31 06:00:00 crc kubenswrapper[4712]: E0131 06:00:00.139425 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a28a1ff-7c14-412e-9a7f-5a0018859762" containerName="mariadb-database-create" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139447 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a28a1ff-7c14-412e-9a7f-5a0018859762" containerName="mariadb-database-create" Jan 31 06:00:00 crc kubenswrapper[4712]: E0131 06:00:00.139464 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdbe92ef-e3d9-4528-bc92-ba7309daafe4" containerName="mariadb-account-create-update" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139473 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdbe92ef-e3d9-4528-bc92-ba7309daafe4" containerName="mariadb-account-create-update" Jan 31 06:00:00 crc kubenswrapper[4712]: E0131 06:00:00.139492 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c8e6c2-dd63-4d15-8c59-f1aa014bdebe" containerName="mariadb-account-create-update" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139501 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c8e6c2-dd63-4d15-8c59-f1aa014bdebe" containerName="mariadb-account-create-update" Jan 31 06:00:00 crc kubenswrapper[4712]: E0131 06:00:00.139517 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc" containerName="mariadb-database-create" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139526 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc" containerName="mariadb-database-create" Jan 31 06:00:00 crc kubenswrapper[4712]: E0131 06:00:00.139535 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2998b9bb-f92a-427c-824a-80e66ff0643b" containerName="mariadb-account-create-update" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139544 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2998b9bb-f92a-427c-824a-80e66ff0643b" containerName="mariadb-account-create-update" Jan 31 06:00:00 crc kubenswrapper[4712]: E0131 06:00:00.139565 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a2b25a7-db91-4e29-9d5e-6acaa45cc200" containerName="mariadb-database-create" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139593 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a2b25a7-db91-4e29-9d5e-6acaa45cc200" containerName="mariadb-database-create" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139840 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2998b9bb-f92a-427c-824a-80e66ff0643b" containerName="mariadb-account-create-update" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139858 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc" containerName="mariadb-database-create" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139872 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c8e6c2-dd63-4d15-8c59-f1aa014bdebe" containerName="mariadb-account-create-update" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139891 4712 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0a2b25a7-db91-4e29-9d5e-6acaa45cc200" containerName="mariadb-database-create" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139905 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a28a1ff-7c14-412e-9a7f-5a0018859762" containerName="mariadb-database-create" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.139917 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdbe92ef-e3d9-4528-bc92-ba7309daafe4" containerName="mariadb-account-create-update" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.140766 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.143520 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.144196 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.145331 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fpq7\" (UniqueName: \"kubernetes.io/projected/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-kube-api-access-7fpq7\") pod \"collect-profiles-29497320-wpq6s\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.145524 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-config-volume\") pod \"collect-profiles-29497320-wpq6s\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.145676 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-secret-volume\") pod \"collect-profiles-29497320-wpq6s\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.150955 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"] Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.247356 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-secret-volume\") pod \"collect-profiles-29497320-wpq6s\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.247517 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fpq7\" (UniqueName: \"kubernetes.io/projected/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-kube-api-access-7fpq7\") pod \"collect-profiles-29497320-wpq6s\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s" Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 
Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.247617 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-config-volume\") pod \"collect-profiles-29497320-wpq6s\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"
Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.248540 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-config-volume\") pod \"collect-profiles-29497320-wpq6s\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"
Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.252794 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-secret-volume\") pod \"collect-profiles-29497320-wpq6s\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"
Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.267895 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fpq7\" (UniqueName: \"kubernetes.io/projected/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-kube-api-access-7fpq7\") pod \"collect-profiles-29497320-wpq6s\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"
Jan 31 06:00:00 crc kubenswrapper[4712]: I0131 06:00:00.467010 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"
Jan 31 06:00:10 crc kubenswrapper[4712]: E0131 06:00:10.528849 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-glance-api:d790bc5e0de33b4fa3f6e15acfa448e0"
Jan 31 06:00:10 crc kubenswrapper[4712]: E0131 06:00:10.529617 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-glance-api:d790bc5e0de33b4fa3f6e15acfa448e0"
Jan 31 06:00:10 crc kubenswrapper[4712]: E0131 06:00:10.529781 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.rdoproject.org/podified-master-centos9/openstack-glance-api:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l56vp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-qp52f_openstack(a59772f8-fb2b-4ccd-80e3-de90890503d9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 06:00:10 crc kubenswrapper[4712]: E0131 06:00:10.530971 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-qp52f" podUID="a59772f8-fb2b-4ccd-80e3-de90890503d9" Jan 31 06:00:11 crc kubenswrapper[4712]: E0131 06:00:11.481759 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-glance-api:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/glance-db-sync-qp52f" podUID="a59772f8-fb2b-4ccd-80e3-de90890503d9" Jan 31 06:00:12 crc kubenswrapper[4712]: I0131 06:00:12.498001 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:00:12 crc kubenswrapper[4712]: I0131 06:00:12.498555 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Jan 31 06:00:13 crc kubenswrapper[4712]: E0131 06:00:13.099303 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-keystone:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 06:00:13 crc kubenswrapper[4712]: E0131 06:00:13.099557 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-keystone:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 06:00:13 crc kubenswrapper[4712]: E0131 06:00:13.099814 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:keystone-db-sync,Image:quay.rdoproject.org/podified-master-centos9/openstack-keystone:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/bash],Args:[-c keystone-manage db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/keystone/keystone.conf,SubPath:keystone.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xk5r6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42425,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42425,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-db-sync-mngsk_openstack(1df94e08-77d8-4734-9985-81bc69a91cf2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 06:00:13 crc kubenswrapper[4712]: E0131 06:00:13.101133 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/keystone-db-sync-mngsk" podUID="1df94e08-77d8-4734-9985-81bc69a91cf2" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.220062 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.220062 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-xbh95-config-sld7x"
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.330972 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run-ovn\") pod \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") "
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.331129 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" (UID: "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.331547 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-scripts\") pod \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") "
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.331722 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-additional-scripts\") pod \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") "
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.331773 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-log-ovn\") pod \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") "
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.331816 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvkjc\" (UniqueName: \"kubernetes.io/projected/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-kube-api-access-hvkjc\") pod \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") "
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.331844 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run\") pod \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\" (UID: \"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec\") "
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.331894 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" (UID: "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.332295 4712 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.332317 4712 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.332330 4712 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.332850 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" (UID: "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.333052 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-scripts" (OuterVolumeSpecName: "scripts") pod "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" (UID: "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.340013 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-kube-api-access-hvkjc" (OuterVolumeSpecName: "kube-api-access-hvkjc") pod "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" (UID: "9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec"). InnerVolumeSpecName "kube-api-access-hvkjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.434126 4712 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.434165 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvkjc\" (UniqueName: \"kubernetes.io/projected/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-kube-api-access-hvkjc\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.434206 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.496770 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.496770 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-xbh95-config-sld7x"
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.496816 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-xbh95-config-sld7x" event={"ID":"9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec","Type":"ContainerDied","Data":"91410343d3b216a24dece306d5dcf9236b0aa9225978485a363e2a2f09c345b0"}
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.496859 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91410343d3b216a24dece306d5dcf9236b0aa9225978485a363e2a2f09c345b0"
Jan 31 06:00:13 crc kubenswrapper[4712]: E0131 06:00:13.499846 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-keystone:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/keystone-db-sync-mngsk" podUID="1df94e08-77d8-4734-9985-81bc69a91cf2"
Jan 31 06:00:13 crc kubenswrapper[4712]: W0131 06:00:13.630431 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f57bed8_d02f_4e4d_9ca8_11f061b37df5.slice/crio-6162c192c19e96fe213344813db531a80d63726e2d158986f22282405374ab39 WatchSource:0}: Error finding container 6162c192c19e96fe213344813db531a80d63726e2d158986f22282405374ab39: Status 404 returned error can't find the container with id 6162c192c19e96fe213344813db531a80d63726e2d158986f22282405374ab39
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.636658 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"]
Jan 31 06:00:13 crc kubenswrapper[4712]: I0131 06:00:13.770950 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Jan 31 06:00:13 crc kubenswrapper[4712]: W0131 06:00:13.776499 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod526325aa_f517_45ab_b0d3_b7285ef8db7b.slice/crio-c74873bba1296d8fa9007c224a01684639b46689e0adcfbaf1486ff324cd985d WatchSource:0}: Error finding container c74873bba1296d8fa9007c224a01684639b46689e0adcfbaf1486ff324cd985d: Status 404 returned error can't find the container with id c74873bba1296d8fa9007c224a01684639b46689e0adcfbaf1486ff324cd985d
Jan 31 06:00:14 crc kubenswrapper[4712]: I0131 06:00:14.311525 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-xbh95-config-sld7x"]
Jan 31 06:00:14 crc kubenswrapper[4712]: I0131 06:00:14.323240 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-xbh95-config-sld7x"]
Jan 31 06:00:14 crc kubenswrapper[4712]: I0131 06:00:14.516374 4712 generic.go:334] "Generic (PLEG): container finished" podID="9f57bed8-d02f-4e4d-9ca8-11f061b37df5" containerID="ba0a0bc0080c32364c7aac525e2dd1185e9292e16ce1933f89846ce597c618c5" exitCode=0
Jan 31 06:00:14 crc kubenswrapper[4712]: I0131 06:00:14.516437 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" path="/var/lib/kubelet/pods/9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec/volumes"
event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"c74873bba1296d8fa9007c224a01684639b46689e0adcfbaf1486ff324cd985d"} Jan 31 06:00:14 crc kubenswrapper[4712]: I0131 06:00:14.517398 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s" event={"ID":"9f57bed8-d02f-4e4d-9ca8-11f061b37df5","Type":"ContainerDied","Data":"ba0a0bc0080c32364c7aac525e2dd1185e9292e16ce1933f89846ce597c618c5"} Jan 31 06:00:14 crc kubenswrapper[4712]: I0131 06:00:14.517412 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s" event={"ID":"9f57bed8-d02f-4e4d-9ca8-11f061b37df5","Type":"ContainerStarted","Data":"6162c192c19e96fe213344813db531a80d63726e2d158986f22282405374ab39"} Jan 31 06:00:15 crc kubenswrapper[4712]: I0131 06:00:15.544816 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"fbe921c12bdb459ad4ec0226336b2c65fc639d228be760d0ca7a82e1e705b63a"} Jan 31 06:00:15 crc kubenswrapper[4712]: I0131 06:00:15.545455 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"f0f773ce64229b87ef18ba00356cf5a8bcf15e5a518888ad7ce3bcb743c8ae98"} Jan 31 06:00:15 crc kubenswrapper[4712]: I0131 06:00:15.545473 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"cebea7d88d27f19363839fff6cf9a6e9a119df3165981ee81254427a2dc38341"} Jan 31 06:00:15 crc kubenswrapper[4712]: I0131 06:00:15.545482 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"197493d71f3bd2f43bdff91d1c7642410c4df551873ac460ddf252334b1559ab"} Jan 31 06:00:15 crc kubenswrapper[4712]: I0131 06:00:15.939912 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:00:15 crc kubenswrapper[4712]: I0131 06:00:15.939912 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"
Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.084416 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fpq7\" (UniqueName: \"kubernetes.io/projected/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-kube-api-access-7fpq7\") pod \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") "
Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.084577 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-config-volume\") pod \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") "
Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.084685 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-secret-volume\") pod \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\" (UID: \"9f57bed8-d02f-4e4d-9ca8-11f061b37df5\") "
Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.086794 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-config-volume" (OuterVolumeSpecName: "config-volume") pod "9f57bed8-d02f-4e4d-9ca8-11f061b37df5" (UID: "9f57bed8-d02f-4e4d-9ca8-11f061b37df5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.094451 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-kube-api-access-7fpq7" (OuterVolumeSpecName: "kube-api-access-7fpq7") pod "9f57bed8-d02f-4e4d-9ca8-11f061b37df5" (UID: "9f57bed8-d02f-4e4d-9ca8-11f061b37df5"). InnerVolumeSpecName "kube-api-access-7fpq7". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.186840 4712 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-config-volume\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.187101 4712 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.187163 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fpq7\" (UniqueName: \"kubernetes.io/projected/9f57bed8-d02f-4e4d-9ca8-11f061b37df5-kube-api-access-7fpq7\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.560664 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"090354837c26b6bb1d8b852fc5b9110ff690e33bdb80044882b37cd6bd5ec90e"} Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.562975 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s" event={"ID":"9f57bed8-d02f-4e4d-9ca8-11f061b37df5","Type":"ContainerDied","Data":"6162c192c19e96fe213344813db531a80d63726e2d158986f22282405374ab39"} Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.563017 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6162c192c19e96fe213344813db531a80d63726e2d158986f22282405374ab39" Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.563034 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:00:16 crc kubenswrapper[4712]: I0131 06:00:16.563034 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"
Jan 31 06:00:17 crc kubenswrapper[4712]: I0131 06:00:17.574958 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"4932dd455052442c9825ec1b5fbb550944b1988d548dba9652efb27cc19cd235"}
Jan 31 06:00:17 crc kubenswrapper[4712]: I0131 06:00:17.575375 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"d0cf3c6d1a5ab7503ffd56080fd34f6b6a47b688a3fa5d361b15bc5bf073c025"}
Jan 31 06:00:17 crc kubenswrapper[4712]: I0131 06:00:17.575392 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"0742c797595be4b7ef35be034ed99950dc14f87436762c209ea5900df79b0346"}
Jan 31 06:00:19 crc kubenswrapper[4712]: I0131 06:00:19.622051 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"4c81fdcf26d0b29b47ce93fa9b60d7985df45e5c5c95ad0a614045b21f27bf19"}
Jan 31 06:00:19 crc kubenswrapper[4712]: I0131 06:00:19.622484 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"98375ebd893e7bcd8e4702589d87096f880c076e4fe1c9b07264ee4a09c3c570"}
Jan 31 06:00:19 crc kubenswrapper[4712]: I0131 06:00:19.622500 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"2df182e6704e4ccc7de47a8bfd78e46d95dcd54efdefdcfa23b39f801c2983c6"}
Jan 31 06:00:19 crc kubenswrapper[4712]: I0131 06:00:19.622511 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"d2a1e11cf31fc6d84557e886a9d7da5496bb6c2b30ca10534870febe916390e9"}
Jan 31 06:00:19 crc kubenswrapper[4712]: I0131 06:00:19.622522 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"ce69404bfa30a945cfd43bc243738ed04ec4fd34d285e2477194811f02d07a88"}
Jan 31 06:00:20 crc kubenswrapper[4712]: I0131 06:00:20.639078 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"4097af84536a17e5a3acb9b5eacae8c76da524c2bc4d3041a7b741b6f3aac7a3"}
Jan 31 06:00:20 crc kubenswrapper[4712]: I0131 06:00:20.639633 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"526325aa-f517-45ab-b0d3-b7285ef8db7b","Type":"ContainerStarted","Data":"f362341855f492df5a0376e81d81fd212c09242fc47bf01c3a6c49843dfadf2c"}
Jan 31 06:00:20 crc kubenswrapper[4712]: I0131 06:00:20.705748 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=50.196804532 podStartE2EDuration="54.705729473s" podCreationTimestamp="2026-01-31 05:59:26 +0000 UTC" firstStartedPulling="2026-01-31 06:00:13.779122595 +0000 UTC m=+1279.873004446" lastFinishedPulling="2026-01-31 06:00:18.288047546 +0000 UTC m=+1284.381929387" observedRunningTime="2026-01-31 06:00:20.695998103 +0000 UTC m=+1286.789879944" watchObservedRunningTime="2026-01-31 06:00:20.705729473 +0000 UTC m=+1286.799611314"
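The numbers in the startup-duration record above are internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (06:00:20.705729473 - 05:59:26 = 54.705729473s), and podStartSLOduration appears to be that figure minus the image-pull window given by the two monotonic m=+ offsets. This is an inference from the record's own numbers, not a quote of the tracker's source; a small sketch checking the arithmetic:

```go
// Sketch: reproducing podStartSLOduration for swift-storage-0 from the
// record above. The pull window is lastFinishedPulling minus
// firstStartedPulling, taken from the m=+ monotonic offsets.
package main

import "fmt"

func main() {
	e2e := 54.705729473                     // podStartE2EDuration, seconds
	pull := 1284.381929387 - 1279.873004446 // pull window = 4.508924941s
	fmt.Printf("podStartSLOduration = %.9f\n", e2e-pull) // 50.196804532, matching the log
}
```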
m=+1284.381929387" observedRunningTime="2026-01-31 06:00:20.695998103 +0000 UTC m=+1286.789879944" watchObservedRunningTime="2026-01-31 06:00:20.705729473 +0000 UTC m=+1286.799611314" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.017603 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-664494d585-dvq5x"] Jan 31 06:00:21 crc kubenswrapper[4712]: E0131 06:00:21.018093 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" containerName="ovn-config" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.018115 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" containerName="ovn-config" Jan 31 06:00:21 crc kubenswrapper[4712]: E0131 06:00:21.018136 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f57bed8-d02f-4e4d-9ca8-11f061b37df5" containerName="collect-profiles" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.018153 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f57bed8-d02f-4e4d-9ca8-11f061b37df5" containerName="collect-profiles" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.018417 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d2db8a1-ac5f-4a3c-ab67-b7dce0457eec" containerName="ovn-config" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.018447 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f57bed8-d02f-4e4d-9ca8-11f061b37df5" containerName="collect-profiles" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.019664 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.021996 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.034860 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-664494d585-dvq5x"] Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.080208 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-nb\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.080251 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-svc\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.080300 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-config\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.080529 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-sb\") pod \"dnsmasq-dns-664494d585-dvq5x\" 
(UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.080609 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbbwx\" (UniqueName: \"kubernetes.io/projected/e102ed01-11fa-4873-8e5b-3c4102fbdabe-kube-api-access-zbbwx\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.080767 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-swift-storage-0\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.182427 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-sb\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.183372 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbbwx\" (UniqueName: \"kubernetes.io/projected/e102ed01-11fa-4873-8e5b-3c4102fbdabe-kube-api-access-zbbwx\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.183476 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-swift-storage-0\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.183384 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-sb\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.183817 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-nb\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.183898 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-svc\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.184011 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-config\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: 
\"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.184315 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-swift-storage-0\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.184487 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-nb\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.184603 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-svc\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.184919 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-config\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.203263 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbbwx\" (UniqueName: \"kubernetes.io/projected/e102ed01-11fa-4873-8e5b-3c4102fbdabe-kube-api-access-zbbwx\") pod \"dnsmasq-dns-664494d585-dvq5x\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") " pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.343891 4712 util.go:30] "No sandbox for pod can be found. 
Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.343891 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-664494d585-dvq5x"
Jan 31 06:00:21 crc kubenswrapper[4712]: I0131 06:00:21.866412 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-664494d585-dvq5x"]
Jan 31 06:00:21 crc kubenswrapper[4712]: W0131 06:00:21.870977 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode102ed01_11fa_4873_8e5b_3c4102fbdabe.slice/crio-30207f55c067957c296d8f68f0f4b2b97ca8098c46945c11912fb33016fb2ebf WatchSource:0}: Error finding container 30207f55c067957c296d8f68f0f4b2b97ca8098c46945c11912fb33016fb2ebf: Status 404 returned error can't find the container with id 30207f55c067957c296d8f68f0f4b2b97ca8098c46945c11912fb33016fb2ebf
Jan 31 06:00:22 crc kubenswrapper[4712]: I0131 06:00:22.660837 4712 generic.go:334] "Generic (PLEG): container finished" podID="e102ed01-11fa-4873-8e5b-3c4102fbdabe" containerID="ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396" exitCode=0
Jan 31 06:00:22 crc kubenswrapper[4712]: I0131 06:00:22.660882 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-664494d585-dvq5x" event={"ID":"e102ed01-11fa-4873-8e5b-3c4102fbdabe","Type":"ContainerDied","Data":"ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396"}
Jan 31 06:00:22 crc kubenswrapper[4712]: I0131 06:00:22.661225 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-664494d585-dvq5x" event={"ID":"e102ed01-11fa-4873-8e5b-3c4102fbdabe","Type":"ContainerStarted","Data":"30207f55c067957c296d8f68f0f4b2b97ca8098c46945c11912fb33016fb2ebf"}
Jan 31 06:00:23 crc kubenswrapper[4712]: I0131 06:00:23.670309 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-664494d585-dvq5x" event={"ID":"e102ed01-11fa-4873-8e5b-3c4102fbdabe","Type":"ContainerStarted","Data":"e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681"}
Jan 31 06:00:23 crc kubenswrapper[4712]: I0131 06:00:23.671263 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-664494d585-dvq5x"
Jan 31 06:00:23 crc kubenswrapper[4712]: I0131 06:00:23.697437 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-664494d585-dvq5x" podStartSLOduration=3.6974149450000002 podStartE2EDuration="3.697414945s" podCreationTimestamp="2026-01-31 06:00:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:00:23.691517359 +0000 UTC m=+1289.785399200" watchObservedRunningTime="2026-01-31 06:00:23.697414945 +0000 UTC m=+1289.791296786"
Jan 31 06:00:25 crc kubenswrapper[4712]: I0131 06:00:25.699888 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qp52f" event={"ID":"a59772f8-fb2b-4ccd-80e3-de90890503d9","Type":"ContainerStarted","Data":"82dc477ea41d1afefec9b15ce55e9d53bb61245bc13a203e4d408bc8d205ab81"}
m=+1291.811038032" watchObservedRunningTime="2026-01-31 06:00:25.723489938 +0000 UTC m=+1291.817371779" Jan 31 06:00:29 crc kubenswrapper[4712]: I0131 06:00:29.736831 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mngsk" event={"ID":"1df94e08-77d8-4734-9985-81bc69a91cf2","Type":"ContainerStarted","Data":"bc385aff64b8c56b41714eca2dbc2a8109dbbf2053ec525d5961a3dcdff7f6bb"} Jan 31 06:00:29 crc kubenswrapper[4712]: I0131 06:00:29.757977 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-mngsk" podStartSLOduration=2.392462233 podStartE2EDuration="35.757956664s" podCreationTimestamp="2026-01-31 05:59:54 +0000 UTC" firstStartedPulling="2026-01-31 05:59:55.326609369 +0000 UTC m=+1261.420491210" lastFinishedPulling="2026-01-31 06:00:28.6921038 +0000 UTC m=+1294.785985641" observedRunningTime="2026-01-31 06:00:29.757107793 +0000 UTC m=+1295.850989654" watchObservedRunningTime="2026-01-31 06:00:29.757956664 +0000 UTC m=+1295.851838505" Jan 31 06:00:31 crc kubenswrapper[4712]: I0131 06:00:31.345468 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-664494d585-dvq5x" Jan 31 06:00:31 crc kubenswrapper[4712]: I0131 06:00:31.406443 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-665586dd7c-2w2wq"] Jan 31 06:00:31 crc kubenswrapper[4712]: I0131 06:00:31.406709 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" podUID="d72aff95-0c44-495c-8fa5-d4a4ef3aa765" containerName="dnsmasq-dns" containerID="cri-o://a3e0aae95726a49c7292184cf8f574f430ed94a4b288cbb06cb8060e4b5762e3" gracePeriod=10 Jan 31 06:00:31 crc kubenswrapper[4712]: I0131 06:00:31.755498 4712 generic.go:334] "Generic (PLEG): container finished" podID="d72aff95-0c44-495c-8fa5-d4a4ef3aa765" containerID="a3e0aae95726a49c7292184cf8f574f430ed94a4b288cbb06cb8060e4b5762e3" exitCode=0 Jan 31 06:00:31 crc kubenswrapper[4712]: I0131 06:00:31.755607 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" event={"ID":"d72aff95-0c44-495c-8fa5-d4a4ef3aa765","Type":"ContainerDied","Data":"a3e0aae95726a49c7292184cf8f574f430ed94a4b288cbb06cb8060e4b5762e3"} Jan 31 06:00:31 crc kubenswrapper[4712]: I0131 06:00:31.934196 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:00:31 crc kubenswrapper[4712]: I0131 06:00:31.934196 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq"
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.015458 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-dns-svc\") pod \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") "
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.015562 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-sb\") pod \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") "
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.015599 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brn64\" (UniqueName: \"kubernetes.io/projected/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-kube-api-access-brn64\") pod \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") "
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.015624 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-config\") pod \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") "
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.015642 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-nb\") pod \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\" (UID: \"d72aff95-0c44-495c-8fa5-d4a4ef3aa765\") "
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.030130 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-kube-api-access-brn64" (OuterVolumeSpecName: "kube-api-access-brn64") pod "d72aff95-0c44-495c-8fa5-d4a4ef3aa765" (UID: "d72aff95-0c44-495c-8fa5-d4a4ef3aa765"). InnerVolumeSpecName "kube-api-access-brn64". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.063718 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-config" (OuterVolumeSpecName: "config") pod "d72aff95-0c44-495c-8fa5-d4a4ef3aa765" (UID: "d72aff95-0c44-495c-8fa5-d4a4ef3aa765"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.067411 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d72aff95-0c44-495c-8fa5-d4a4ef3aa765" (UID: "d72aff95-0c44-495c-8fa5-d4a4ef3aa765"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.071287 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d72aff95-0c44-495c-8fa5-d4a4ef3aa765" (UID: "d72aff95-0c44-495c-8fa5-d4a4ef3aa765"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.117944 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.117978 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.117993 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brn64\" (UniqueName: \"kubernetes.io/projected/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-kube-api-access-brn64\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.118004 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.118012 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d72aff95-0c44-495c-8fa5-d4a4ef3aa765-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.774352 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq" event={"ID":"d72aff95-0c44-495c-8fa5-d4a4ef3aa765","Type":"ContainerDied","Data":"2452862451a153a61b78baf4a3e68b382ff40b8a62a2eb3487fa3b1d7f85b2e3"} Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.774755 4712 scope.go:117] "RemoveContainer" containerID="a3e0aae95726a49c7292184cf8f574f430ed94a4b288cbb06cb8060e4b5762e3" Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.774995 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.774995 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665586dd7c-2w2wq"
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.798995 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-665586dd7c-2w2wq"]
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.804268 4712 scope.go:117] "RemoveContainer" containerID="2c588e139bf82831700b59759d4bcbaeb33d22ce1f0a4e4f9ba6703420f314bd"
Jan 31 06:00:32 crc kubenswrapper[4712]: I0131 06:00:32.808441 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-665586dd7c-2w2wq"]
Jan 31 06:00:34 crc kubenswrapper[4712]: I0131 06:00:34.515956 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d72aff95-0c44-495c-8fa5-d4a4ef3aa765" path="/var/lib/kubelet/pods/d72aff95-0c44-495c-8fa5-d4a4ef3aa765/volumes"
Jan 31 06:00:42 crc kubenswrapper[4712]: I0131 06:00:42.499699 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 31 06:00:42 crc kubenswrapper[4712]: I0131 06:00:42.500410 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 31 06:00:42 crc kubenswrapper[4712]: I0131 06:00:42.500465 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd"
Jan 31 06:00:42 crc kubenswrapper[4712]: I0131 06:00:42.501413 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0d66f63e01719bf8f3e02142623cdb63b4fc7cc229aaa57643fb27385d070e76"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 31 06:00:42 crc kubenswrapper[4712]: I0131 06:00:42.501499 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://0d66f63e01719bf8f3e02142623cdb63b4fc7cc229aaa57643fb27385d070e76" gracePeriod=600
Jan 31 06:00:44 crc kubenswrapper[4712]: I0131 06:00:44.901228 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="0d66f63e01719bf8f3e02142623cdb63b4fc7cc229aaa57643fb27385d070e76" exitCode=0
Jan 31 06:00:44 crc kubenswrapper[4712]: I0131 06:00:44.901292 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"0d66f63e01719bf8f3e02142623cdb63b4fc7cc229aaa57643fb27385d070e76"}
Jan 31 06:00:44 crc kubenswrapper[4712]: I0131 06:00:44.901798 4712 scope.go:117] "RemoveContainer" containerID="d072cbc66487ddb6d9c89bfc7420c017cf30064480bcc1f0a5508bf27bbaeb59"
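The machine-config-daemon sequence above shows the full liveness-failure path: the probe GET to http://127.0.0.1:8798/health is refused, the pod is marked unhealthy, the container is killed with gracePeriod=600, and a replacement container starts shortly after (the ContainerStarted event just below). The daemon's actual manifest is not in this log, so as a sketch only, a probe consistent with the logged failure output would look like this in Go; periods and thresholds are placeholders, not values from the log:

```go
// Sketch: a liveness probe matching the failure output above
// ("GET http://127.0.0.1:8798/health" -> connection refused).
// Only host, path and port come from the log; the 600s grace period
// in the kill record comes from the pod, not from the probe.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	probe := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Host: "127.0.0.1",
				Path: "/health",
				Port: intstr.FromInt32(8798),
			},
		},
	}
	fmt.Printf("GET http://%s:%d%s\n",
		probe.HTTPGet.Host, probe.HTTPGet.Port.IntValue(), probe.HTTPGet.Path)
}
```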
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"73eb5805d5b1bc38b6b568e991a2ca4d8d641189ec28507e068fbd8ff0272f37"} Jan 31 06:00:52 crc kubenswrapper[4712]: I0131 06:00:52.979355 4712 generic.go:334] "Generic (PLEG): container finished" podID="1df94e08-77d8-4734-9985-81bc69a91cf2" containerID="bc385aff64b8c56b41714eca2dbc2a8109dbbf2053ec525d5961a3dcdff7f6bb" exitCode=0 Jan 31 06:00:52 crc kubenswrapper[4712]: I0131 06:00:52.979444 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mngsk" event={"ID":"1df94e08-77d8-4734-9985-81bc69a91cf2","Type":"ContainerDied","Data":"bc385aff64b8c56b41714eca2dbc2a8109dbbf2053ec525d5961a3dcdff7f6bb"} Jan 31 06:00:53 crc kubenswrapper[4712]: I0131 06:00:53.994022 4712 generic.go:334] "Generic (PLEG): container finished" podID="a59772f8-fb2b-4ccd-80e3-de90890503d9" containerID="82dc477ea41d1afefec9b15ce55e9d53bb61245bc13a203e4d408bc8d205ab81" exitCode=0 Jan 31 06:00:53 crc kubenswrapper[4712]: I0131 06:00:53.994156 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qp52f" event={"ID":"a59772f8-fb2b-4ccd-80e3-de90890503d9","Type":"ContainerDied","Data":"82dc477ea41d1afefec9b15ce55e9d53bb61245bc13a203e4d408bc8d205ab81"} Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.373285 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mngsk" Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.480602 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-combined-ca-bundle\") pod \"1df94e08-77d8-4734-9985-81bc69a91cf2\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.480750 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-config-data\") pod \"1df94e08-77d8-4734-9985-81bc69a91cf2\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.480798 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xk5r6\" (UniqueName: \"kubernetes.io/projected/1df94e08-77d8-4734-9985-81bc69a91cf2-kube-api-access-xk5r6\") pod \"1df94e08-77d8-4734-9985-81bc69a91cf2\" (UID: \"1df94e08-77d8-4734-9985-81bc69a91cf2\") " Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.498669 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1df94e08-77d8-4734-9985-81bc69a91cf2-kube-api-access-xk5r6" (OuterVolumeSpecName: "kube-api-access-xk5r6") pod "1df94e08-77d8-4734-9985-81bc69a91cf2" (UID: "1df94e08-77d8-4734-9985-81bc69a91cf2"). InnerVolumeSpecName "kube-api-access-xk5r6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.510651 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1df94e08-77d8-4734-9985-81bc69a91cf2" (UID: "1df94e08-77d8-4734-9985-81bc69a91cf2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.551144 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-config-data" (OuterVolumeSpecName: "config-data") pod "1df94e08-77d8-4734-9985-81bc69a91cf2" (UID: "1df94e08-77d8-4734-9985-81bc69a91cf2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.583587 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.583624 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df94e08-77d8-4734-9985-81bc69a91cf2-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:54 crc kubenswrapper[4712]: I0131 06:00:54.583636 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xk5r6\" (UniqueName: \"kubernetes.io/projected/1df94e08-77d8-4734-9985-81bc69a91cf2-kube-api-access-xk5r6\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.012096 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mngsk" event={"ID":"1df94e08-77d8-4734-9985-81bc69a91cf2","Type":"ContainerDied","Data":"a15efc770c5d46539c555fea3bfca66aab6a4001f088b0c652e619c4e77edbd8"} Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.012473 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a15efc770c5d46539c555fea3bfca66aab6a4001f088b0c652e619c4e77edbd8" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.012158 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.012158 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mngsk"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.345351 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-549896c55c-5rr59"]
Jan 31 06:00:55 crc kubenswrapper[4712]: E0131 06:00:55.345831 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d72aff95-0c44-495c-8fa5-d4a4ef3aa765" containerName="dnsmasq-dns"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.345846 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d72aff95-0c44-495c-8fa5-d4a4ef3aa765" containerName="dnsmasq-dns"
Jan 31 06:00:55 crc kubenswrapper[4712]: E0131 06:00:55.345860 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d72aff95-0c44-495c-8fa5-d4a4ef3aa765" containerName="init"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.345866 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d72aff95-0c44-495c-8fa5-d4a4ef3aa765" containerName="init"
Jan 31 06:00:55 crc kubenswrapper[4712]: E0131 06:00:55.345882 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1df94e08-77d8-4734-9985-81bc69a91cf2" containerName="keystone-db-sync"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.345890 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1df94e08-77d8-4734-9985-81bc69a91cf2" containerName="keystone-db-sync"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.346088 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1df94e08-77d8-4734-9985-81bc69a91cf2" containerName="keystone-db-sync"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.346112 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d72aff95-0c44-495c-8fa5-d4a4ef3aa765" containerName="dnsmasq-dns"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.347218 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-549896c55c-5rr59"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.354703 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-nrqs4"]
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.356015 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nrqs4"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.363808 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.363846 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.364073 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.364142 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kdwj2"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.364315 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.373845 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-549896c55c-5rr59"]
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.395302 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nrqs4"]
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505429 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7g5b\" (UniqueName: \"kubernetes.io/projected/53760a64-e8fb-4b13-9bc1-582519540946-kube-api-access-v7g5b\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505472 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwt8k\" (UniqueName: \"kubernetes.io/projected/d46266c1-f282-4531-b3f6-6485b81e28a7-kube-api-access-nwt8k\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505497 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-sb\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505523 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-config\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59"
Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505547 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-config-data\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4"
\"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505597 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-scripts\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505624 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-swift-storage-0\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505665 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-combined-ca-bundle\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505690 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-fernet-keys\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505708 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-svc\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.505734 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-credential-keys\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.540736 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-tp7gg"] Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.541880 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.544062 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.544352 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.544430 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kdmz5" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.561128 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-tp7gg"] Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.609644 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7g5b\" (UniqueName: \"kubernetes.io/projected/53760a64-e8fb-4b13-9bc1-582519540946-kube-api-access-v7g5b\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610023 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwt8k\" (UniqueName: \"kubernetes.io/projected/d46266c1-f282-4531-b3f6-6485b81e28a7-kube-api-access-nwt8k\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610127 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-sb\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610257 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-config\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610403 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-db-sync-config-data\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610519 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-config-data\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610616 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-nb\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610713 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-scripts\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610802 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-swift-storage-0\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610877 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncsts\" (UniqueName: \"kubernetes.io/projected/29a76001-83c3-470c-aede-3fe832068688-kube-api-access-ncsts\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.610964 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-config-data\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.611043 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-scripts\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.611138 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29a76001-83c3-470c-aede-3fe832068688-etc-machine-id\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.611276 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-combined-ca-bundle\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.611429 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-fernet-keys\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.611516 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-svc\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.611666 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-credential-keys\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.611782 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-combined-ca-bundle\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.613213 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-config\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.616088 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-swift-storage-0\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.621106 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-sb\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.622430 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-nb\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.631053 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-svc\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.633796 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.640338 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-fernet-keys\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.640667 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-scripts\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.641021 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-credential-keys\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.641935 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-combined-ca-bundle\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.643495 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.654643 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.654842 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.659732 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwt8k\" (UniqueName: \"kubernetes.io/projected/d46266c1-f282-4531-b3f6-6485b81e28a7-kube-api-access-nwt8k\") pod \"dnsmasq-dns-549896c55c-5rr59\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.671620 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7g5b\" (UniqueName: \"kubernetes.io/projected/53760a64-e8fb-4b13-9bc1-582519540946-kube-api-access-v7g5b\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.681025 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-config-data\") pod \"keystone-bootstrap-nrqs4\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.692148 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.697496 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.722750 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724251 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724297 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-combined-ca-bundle\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724376 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-db-sync-config-data\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724408 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-config-data\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724433 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-log-httpd\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724499 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-scripts\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724523 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-run-httpd\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724562 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncsts\" (UniqueName: \"kubernetes.io/projected/29a76001-83c3-470c-aede-3fe832068688-kube-api-access-ncsts\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724596 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-config-data\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724621 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-scripts\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724688 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfwm5\" (UniqueName: \"kubernetes.io/projected/11cd7ebc-a807-4246-92cf-48c95531e5ac-kube-api-access-jfwm5\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724712 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29a76001-83c3-470c-aede-3fe832068688-etc-machine-id\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.724786 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29a76001-83c3-470c-aede-3fe832068688-etc-machine-id\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.725406 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.733734 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-config-data\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.742609 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-scripts\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.747759 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-db-sync-config-data\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.755268 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-bp49d"] Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.756555 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.764312 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-combined-ca-bundle\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.777497 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncsts\" (UniqueName: \"kubernetes.io/projected/29a76001-83c3-470c-aede-3fe832068688-kube-api-access-ncsts\") pod \"cinder-db-sync-tp7gg\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") " pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.789363 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bp49d"] Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.796698 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.797104 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-mspqc" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.820150 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826681 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfwm5\" (UniqueName: \"kubernetes.io/projected/11cd7ebc-a807-4246-92cf-48c95531e5ac-kube-api-access-jfwm5\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826756 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-combined-ca-bundle\") pod \"neutron-db-sync-bp49d\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826785 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826815 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgzbz\" (UniqueName: \"kubernetes.io/projected/f515caad-5449-4314-ba23-cc132eba7102-kube-api-access-rgzbz\") pod \"neutron-db-sync-bp49d\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826840 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826891 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-config\") pod \"neutron-db-sync-bp49d\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826921 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-config-data\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826945 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-log-httpd\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826975 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-scripts\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.826997 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-run-httpd\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.827595 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-run-httpd\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.827866 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-log-httpd\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.840056 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-scripts\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.847239 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-mh8d8"] Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.847839 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.856532 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-config-data\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.856910 4712 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.857734 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.871542 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.871790 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-tp7gg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.917163 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-kt4kg" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.918350 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfwm5\" (UniqueName: \"kubernetes.io/projected/11cd7ebc-a807-4246-92cf-48c95531e5ac-kube-api-access-jfwm5\") pod \"ceilometer-0\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " pod="openstack/ceilometer-0" Jan 31 06:00:55 crc kubenswrapper[4712]: I0131 06:00:55.972683 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-qp52f" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.016539 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-combined-ca-bundle\") pod \"neutron-db-sync-bp49d\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.016655 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgzbz\" (UniqueName: \"kubernetes.io/projected/f515caad-5449-4314-ba23-cc132eba7102-kube-api-access-rgzbz\") pod \"neutron-db-sync-bp49d\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.016790 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-config\") pod \"neutron-db-sync-bp49d\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.095196 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgzbz\" (UniqueName: \"kubernetes.io/projected/f515caad-5449-4314-ba23-cc132eba7102-kube-api-access-rgzbz\") pod \"neutron-db-sync-bp49d\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.120132 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-combined-ca-bundle\") pod \"a59772f8-fb2b-4ccd-80e3-de90890503d9\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.120573 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-db-sync-config-data\") pod \"a59772f8-fb2b-4ccd-80e3-de90890503d9\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.120675 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l56vp\" (UniqueName: \"kubernetes.io/projected/a59772f8-fb2b-4ccd-80e3-de90890503d9-kube-api-access-l56vp\") pod \"a59772f8-fb2b-4ccd-80e3-de90890503d9\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.121306 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-config-data\") pod \"a59772f8-fb2b-4ccd-80e3-de90890503d9\" (UID: \"a59772f8-fb2b-4ccd-80e3-de90890503d9\") " Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.122352 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-combined-ca-bundle\") pod \"barbican-db-sync-mh8d8\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") " pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.122469 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74tdc\" (UniqueName: \"kubernetes.io/projected/66188e51-a34f-43f3-b12f-ea74a367587f-kube-api-access-74tdc\") pod \"barbican-db-sync-mh8d8\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") " pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.122646 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-db-sync-config-data\") pod \"barbican-db-sync-mh8d8\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") " pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.127747 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-config\") pod \"neutron-db-sync-bp49d\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.141467 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-combined-ca-bundle\") pod \"neutron-db-sync-bp49d\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.148027 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a59772f8-fb2b-4ccd-80e3-de90890503d9-kube-api-access-l56vp" (OuterVolumeSpecName: "kube-api-access-l56vp") pod "a59772f8-fb2b-4ccd-80e3-de90890503d9" (UID: "a59772f8-fb2b-4ccd-80e3-de90890503d9"). InnerVolumeSpecName "kube-api-access-l56vp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.148426 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a59772f8-fb2b-4ccd-80e3-de90890503d9" (UID: "a59772f8-fb2b-4ccd-80e3-de90890503d9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.155333 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qp52f" event={"ID":"a59772f8-fb2b-4ccd-80e3-de90890503d9","Type":"ContainerDied","Data":"0aa195244466d7f3d266efda7f1a0ba81d117707dbe66ad583c50a5a4dfac77b"} Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.155635 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0aa195244466d7f3d266efda7f1a0ba81d117707dbe66ad583c50a5a4dfac77b" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.155607 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-qp52f" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.208026 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mh8d8"] Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.211912 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a59772f8-fb2b-4ccd-80e3-de90890503d9" (UID: "a59772f8-fb2b-4ccd-80e3-de90890503d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.221473 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.223217 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-db-sync-config-data\") pod \"barbican-db-sync-mh8d8\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") " pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.223287 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-combined-ca-bundle\") pod \"barbican-db-sync-mh8d8\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") " pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.223314 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74tdc\" (UniqueName: \"kubernetes.io/projected/66188e51-a34f-43f3-b12f-ea74a367587f-kube-api-access-74tdc\") pod \"barbican-db-sync-mh8d8\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") " pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.223423 4712 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.223436 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l56vp\" (UniqueName: \"kubernetes.io/projected/a59772f8-fb2b-4ccd-80e3-de90890503d9-kube-api-access-l56vp\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.223532 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.234085 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-jc5lg"] Jan 31 06:00:56 crc kubenswrapper[4712]: E0131 06:00:56.235456 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a59772f8-fb2b-4ccd-80e3-de90890503d9" containerName="glance-db-sync" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.235480 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="a59772f8-fb2b-4ccd-80e3-de90890503d9" containerName="glance-db-sync" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.235809 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="a59772f8-fb2b-4ccd-80e3-de90890503d9" containerName="glance-db-sync" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.236629 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.238027 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-db-sync-config-data\") pod \"barbican-db-sync-mh8d8\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") " pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.244484 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-config-data" (OuterVolumeSpecName: "config-data") pod "a59772f8-fb2b-4ccd-80e3-de90890503d9" (UID: "a59772f8-fb2b-4ccd-80e3-de90890503d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.248376 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.248873 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.252228 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-549896c55c-5rr59"] Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.265103 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jc5lg"] Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.295584 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-vj7tw" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.296043 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-combined-ca-bundle\") pod \"barbican-db-sync-mh8d8\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") " pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.296208 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74tdc\" (UniqueName: \"kubernetes.io/projected/66188e51-a34f-43f3-b12f-ea74a367587f-kube-api-access-74tdc\") pod \"barbican-db-sync-mh8d8\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") " pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.301660 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bp49d" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.317058 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bc7599fc-sdkhh"] Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.318693 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.327141 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bc7599fc-sdkhh"] Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328532 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-config\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328616 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/830c0611-e4bf-4fae-96ed-d3a69bdff35f-logs\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328680 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-combined-ca-bundle\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328753 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdjdr\" (UniqueName: \"kubernetes.io/projected/dc67a839-d9bd-4294-81e7-20271771e9a0-kube-api-access-sdjdr\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328782 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-config-data\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328811 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-nb\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328826 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-sb\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328850 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-scripts\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328932 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-swift-storage-0\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.328974 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-svc\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.329004 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7v9f\" (UniqueName: \"kubernetes.io/projected/830c0611-e4bf-4fae-96ed-d3a69bdff35f-kube-api-access-c7v9f\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.329073 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59772f8-fb2b-4ccd-80e3-de90890503d9-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.417078 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mh8d8" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430230 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-combined-ca-bundle\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430330 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdjdr\" (UniqueName: \"kubernetes.io/projected/dc67a839-d9bd-4294-81e7-20271771e9a0-kube-api-access-sdjdr\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430379 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-config-data\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430418 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-nb\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430438 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-sb\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430457 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-scripts\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430509 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-swift-storage-0\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430537 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-svc\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430557 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7v9f\" (UniqueName: \"kubernetes.io/projected/830c0611-e4bf-4fae-96ed-d3a69bdff35f-kube-api-access-c7v9f\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430582 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-config\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.430611 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/830c0611-e4bf-4fae-96ed-d3a69bdff35f-logs\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.431358 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/830c0611-e4bf-4fae-96ed-d3a69bdff35f-logs\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.434273 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-swift-storage-0\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.434877 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-nb\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.434885 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-sb\") pod 
\"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.435251 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-svc\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.435279 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-config\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.448345 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-config-data\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.448562 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-combined-ca-bundle\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.472647 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-scripts\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.477941 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdjdr\" (UniqueName: \"kubernetes.io/projected/dc67a839-d9bd-4294-81e7-20271771e9a0-kube-api-access-sdjdr\") pod \"dnsmasq-dns-5bc7599fc-sdkhh\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.490071 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7v9f\" (UniqueName: \"kubernetes.io/projected/830c0611-e4bf-4fae-96ed-d3a69bdff35f-kube-api-access-c7v9f\") pod \"placement-db-sync-jc5lg\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") " pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.618350 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jc5lg" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.643793 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.657030 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-549896c55c-5rr59"] Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.698121 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nrqs4"] Jan 31 06:00:56 crc kubenswrapper[4712]: W0131 06:00:56.815570 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53760a64_e8fb_4b13_9bc1_582519540946.slice/crio-58ed05bf74bcb90dd9184796ba1ecd65f492a2dfed59448e73524f6048e735ce WatchSource:0}: Error finding container 58ed05bf74bcb90dd9184796ba1ecd65f492a2dfed59448e73524f6048e735ce: Status 404 returned error can't find the container with id 58ed05bf74bcb90dd9184796ba1ecd65f492a2dfed59448e73524f6048e735ce Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.860511 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-tp7gg"] Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.936385 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:00:56 crc kubenswrapper[4712]: I0131 06:00:56.982931 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bp49d"] Jan 31 06:00:56 crc kubenswrapper[4712]: W0131 06:00:56.989478 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf515caad_5449_4314_ba23_cc132eba7102.slice/crio-9dcc1f864689e5845aede4dd42b9b530a4bafbaeb67e239b57690353765794f4 WatchSource:0}: Error finding container 9dcc1f864689e5845aede4dd42b9b530a4bafbaeb67e239b57690353765794f4: Status 404 returned error can't find the container with id 9dcc1f864689e5845aede4dd42b9b530a4bafbaeb67e239b57690353765794f4 Jan 31 06:00:57 crc kubenswrapper[4712]: I0131 06:00:57.113930 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mh8d8"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.170619 4712 generic.go:334] "Generic (PLEG): container finished" podID="d46266c1-f282-4531-b3f6-6485b81e28a7" containerID="538ebb3405da8ffc353252dc17ff1f1d7212f216c4c976c0ca2b3fac06b50af5" exitCode=0 Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.170680 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549896c55c-5rr59" event={"ID":"d46266c1-f282-4531-b3f6-6485b81e28a7","Type":"ContainerDied","Data":"538ebb3405da8ffc353252dc17ff1f1d7212f216c4c976c0ca2b3fac06b50af5"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.170708 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549896c55c-5rr59" event={"ID":"d46266c1-f282-4531-b3f6-6485b81e28a7","Type":"ContainerStarted","Data":"55620d831aff4001d66a34736bab3ae4d1c2023065682b48552f56f044667502"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.173231 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bp49d" event={"ID":"f515caad-5449-4314-ba23-cc132eba7102","Type":"ContainerStarted","Data":"9dcc1f864689e5845aede4dd42b9b530a4bafbaeb67e239b57690353765794f4"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.184555 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-tp7gg" 
event={"ID":"29a76001-83c3-470c-aede-3fe832068688","Type":"ContainerStarted","Data":"8004bd0dce755f0af52ea09b7108d5e7a6f200c44972d4a035287b5483b92e91"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.188154 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mh8d8" event={"ID":"66188e51-a34f-43f3-b12f-ea74a367587f","Type":"ContainerStarted","Data":"7d25de9a38741ac63ff2bf8e3eed6f66b230f271ec2c656da6dc1aafee4747bf"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.199788 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11cd7ebc-a807-4246-92cf-48c95531e5ac","Type":"ContainerStarted","Data":"229e0b58a667314ec69376653f0e8b9a6564289e3b2794dad743aae950cf7092"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.202839 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nrqs4" event={"ID":"53760a64-e8fb-4b13-9bc1-582519540946","Type":"ContainerStarted","Data":"a3c1890c424c0ba6213feb4be1b0330b91b4dd05de460c5e563d4f94724b179a"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.202884 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nrqs4" event={"ID":"53760a64-e8fb-4b13-9bc1-582519540946","Type":"ContainerStarted","Data":"58ed05bf74bcb90dd9184796ba1ecd65f492a2dfed59448e73524f6048e735ce"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.289610 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jc5lg"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.477037 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bc7599fc-sdkhh"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.593673 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bc7599fc-sdkhh"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.645287 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6497bf7d4f-95d9d"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.646887 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.680848 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6497bf7d4f-95d9d"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.764897 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-svc\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.764941 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-swift-storage-0\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.764963 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-sb\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.764985 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-config\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.765048 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-nb\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.765085 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65g4t\" (UniqueName: \"kubernetes.io/projected/d1bcfd60-2d8c-425f-9240-3f779010ae1d-kube-api-access-65g4t\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.781310 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.782935 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.784883 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.785060 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hlkbc" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.791640 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.796236 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.866345 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-nb\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.866791 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65g4t\" (UniqueName: \"kubernetes.io/projected/d1bcfd60-2d8c-425f-9240-3f779010ae1d-kube-api-access-65g4t\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.866845 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfdvl\" (UniqueName: \"kubernetes.io/projected/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-kube-api-access-jfdvl\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.866878 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-svc\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.866903 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-scripts\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.866919 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-logs\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.866938 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 
crc kubenswrapper[4712]: I0131 06:00:57.866962 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-swift-storage-0\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.867061 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-sb\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.867082 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.867101 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-config\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.867124 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-config-data\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.867180 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.867360 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-nb\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.867874 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-config\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.868335 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-swift-storage-0\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.868381 4712 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-svc\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.872500 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-sb\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.893842 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65g4t\" (UniqueName: \"kubernetes.io/projected/d1bcfd60-2d8c-425f-9240-3f779010ae1d-kube-api-access-65g4t\") pod \"dnsmasq-dns-6497bf7d4f-95d9d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.968488 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfdvl\" (UniqueName: \"kubernetes.io/projected/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-kube-api-access-jfdvl\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.968567 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-logs\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.968584 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-scripts\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.968601 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.968634 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.968658 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-config-data\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.968697 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.969067 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.970445 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-logs\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.970716 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.976651 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.978078 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-config-data\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.980264 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-scripts\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:57.993672 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfdvl\" (UniqueName: \"kubernetes.io/projected/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-kube-api-access-jfdvl\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.012462 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.087225 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.121382 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.230996 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" event={"ID":"dc67a839-d9bd-4294-81e7-20271771e9a0","Type":"ContainerStarted","Data":"b50f595a85b8cb84f8f85f0f4729e5bdcd66eb3c14f8162c3298593ac6b72b10"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.231076 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" event={"ID":"dc67a839-d9bd-4294-81e7-20271771e9a0","Type":"ContainerStarted","Data":"57380d5e652465879fabb3f5ded85913835795496f1cfbc634b16821de334c35"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.238654 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jc5lg" event={"ID":"830c0611-e4bf-4fae-96ed-d3a69bdff35f","Type":"ContainerStarted","Data":"887a866531fe91a8c287d9deec72abf36e30e0c395372b0b8e1148ce37b66283"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.241531 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bp49d" event={"ID":"f515caad-5449-4314-ba23-cc132eba7102","Type":"ContainerStarted","Data":"fab21e60b8618323bccecabf09432c2e453bbf57faf938e7fd6c0fec18f5d266"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.310879 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-nrqs4" podStartSLOduration=3.310850218 podStartE2EDuration="3.310850218s" podCreationTimestamp="2026-01-31 06:00:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:00:58.27547923 +0000 UTC m=+1324.369361071" watchObservedRunningTime="2026-01-31 06:00:58.310850218 +0000 UTC m=+1324.404732059" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.320748 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-bp49d" podStartSLOduration=3.32072503 podStartE2EDuration="3.32072503s" podCreationTimestamp="2026-01-31 06:00:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:00:58.303451466 +0000 UTC m=+1324.397333307" watchObservedRunningTime="2026-01-31 06:00:58.32072503 +0000 UTC m=+1324.414606861" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.880577 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.882488 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.884880 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.896512 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.993961 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-logs\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.994053 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.994088 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.994222 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.994245 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.994292 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj7v2\" (UniqueName: \"kubernetes.io/projected/3db65417-5217-43a7-9959-f2b9c0582fea-kube-api-access-hj7v2\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:58.994343 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.095759 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.095848 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-logs\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.095880 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.095899 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.095985 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.096006 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.096048 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj7v2\" (UniqueName: \"kubernetes.io/projected/3db65417-5217-43a7-9959-f2b9c0582fea-kube-api-access-hj7v2\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.097651 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.097676 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-logs\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.100755 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: 
I0131 06:00:59.104479 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.109367 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.117253 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.118245 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj7v2\" (UniqueName: \"kubernetes.io/projected/3db65417-5217-43a7-9959-f2b9c0582fea-kube-api-access-hj7v2\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.125142 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.207336 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.268889 4712 generic.go:334] "Generic (PLEG): container finished" podID="dc67a839-d9bd-4294-81e7-20271771e9a0" containerID="b50f595a85b8cb84f8f85f0f4729e5bdcd66eb3c14f8162c3298593ac6b72b10" exitCode=0 Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:00:59.269036 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" event={"ID":"dc67a839-d9bd-4294-81e7-20271771e9a0","Type":"ContainerDied","Data":"b50f595a85b8cb84f8f85f0f4729e5bdcd66eb3c14f8162c3298593ac6b72b10"} Jan 31 06:01:01 crc kubenswrapper[4712]: I0131 06:01:01.941918 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:01:02 crc kubenswrapper[4712]: I0131 06:01:02.372127 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6497bf7d4f-95d9d"] Jan 31 06:01:02 crc kubenswrapper[4712]: I0131 06:01:02.467949 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:01:03 crc kubenswrapper[4712]: I0131 06:01:03.418076 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:01:05 crc kubenswrapper[4712]: I0131 06:01:05.351704 4712 generic.go:334] "Generic (PLEG): container finished" podID="53760a64-e8fb-4b13-9bc1-582519540946" containerID="a3c1890c424c0ba6213feb4be1b0330b91b4dd05de460c5e563d4f94724b179a" exitCode=0 Jan 31 06:01:05 crc kubenswrapper[4712]: I0131 06:01:05.351808 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nrqs4" event={"ID":"53760a64-e8fb-4b13-9bc1-582519540946","Type":"ContainerDied","Data":"a3c1890c424c0ba6213feb4be1b0330b91b4dd05de460c5e563d4f94724b179a"} Jan 31 06:01:05 crc kubenswrapper[4712]: I0131 06:01:05.843610 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:01:05 crc kubenswrapper[4712]: I0131 06:01:05.918695 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:01:06 crc kubenswrapper[4712]: I0131 06:01:06.362762 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3db65417-5217-43a7-9959-f2b9c0582fea","Type":"ContainerStarted","Data":"96228b46547fdee73969d7b5678718e766d64334338d531097737e3ea435af78"} Jan 31 06:01:13 crc kubenswrapper[4712]: W0131 06:01:13.352396 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1bcfd60_2d8c_425f_9240_3f779010ae1d.slice/crio-7fa002e37534591c0133057e945060ad29a4557ee6157c8b71afc4259d7d32dd WatchSource:0}: Error finding container 7fa002e37534591c0133057e945060ad29a4557ee6157c8b71afc4259d7d32dd: Status 404 returned error can't find the container with id 7fa002e37534591c0133057e945060ad29a4557ee6157c8b71afc4259d7d32dd Jan 31 06:01:13 crc kubenswrapper[4712]: I0131 06:01:13.422729 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" event={"ID":"d1bcfd60-2d8c-425f-9240-3f779010ae1d","Type":"ContainerStarted","Data":"7fa002e37534591c0133057e945060ad29a4557ee6157c8b71afc4259d7d32dd"} Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.349978 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.356553 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.400425 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-credential-keys\") pod \"53760a64-e8fb-4b13-9bc1-582519540946\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.400492 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-combined-ca-bundle\") pod \"53760a64-e8fb-4b13-9bc1-582519540946\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.400519 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-sb\") pod \"d46266c1-f282-4531-b3f6-6485b81e28a7\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.408449 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "53760a64-e8fb-4b13-9bc1-582519540946" (UID: "53760a64-e8fb-4b13-9bc1-582519540946"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.433192 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53760a64-e8fb-4b13-9bc1-582519540946" (UID: "53760a64-e8fb-4b13-9bc1-582519540946"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.442498 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d46266c1-f282-4531-b3f6-6485b81e28a7" (UID: "d46266c1-f282-4531-b3f6-6485b81e28a7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.445705 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd","Type":"ContainerStarted","Data":"eb3e6ddf93e68fc6d288188b635bfcab73b2109d14f9f6305ea7032ade15ae4a"} Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.447756 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" event={"ID":"dc67a839-d9bd-4294-81e7-20271771e9a0","Type":"ContainerDied","Data":"57380d5e652465879fabb3f5ded85913835795496f1cfbc634b16821de334c35"} Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.447799 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57380d5e652465879fabb3f5ded85913835795496f1cfbc634b16821de334c35" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.449363 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nrqs4" event={"ID":"53760a64-e8fb-4b13-9bc1-582519540946","Type":"ContainerDied","Data":"58ed05bf74bcb90dd9184796ba1ecd65f492a2dfed59448e73524f6048e735ce"} Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.449457 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58ed05bf74bcb90dd9184796ba1ecd65f492a2dfed59448e73524f6048e735ce" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.449572 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nrqs4" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.451225 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549896c55c-5rr59" event={"ID":"d46266c1-f282-4531-b3f6-6485b81e28a7","Type":"ContainerDied","Data":"55620d831aff4001d66a34736bab3ae4d1c2023065682b48552f56f044667502"} Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.451322 4712 scope.go:117] "RemoveContainer" containerID="538ebb3405da8ffc353252dc17ff1f1d7212f216c4c976c0ca2b3fac06b50af5" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.451465 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-549896c55c-5rr59" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.494749 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.501970 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwt8k\" (UniqueName: \"kubernetes.io/projected/d46266c1-f282-4531-b3f6-6485b81e28a7-kube-api-access-nwt8k\") pod \"d46266c1-f282-4531-b3f6-6485b81e28a7\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.502121 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7g5b\" (UniqueName: \"kubernetes.io/projected/53760a64-e8fb-4b13-9bc1-582519540946-kube-api-access-v7g5b\") pod \"53760a64-e8fb-4b13-9bc1-582519540946\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.502197 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-fernet-keys\") pod \"53760a64-e8fb-4b13-9bc1-582519540946\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.502268 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-nb\") pod \"d46266c1-f282-4531-b3f6-6485b81e28a7\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.502297 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-swift-storage-0\") pod \"d46266c1-f282-4531-b3f6-6485b81e28a7\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.502362 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-scripts\") pod \"53760a64-e8fb-4b13-9bc1-582519540946\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.502386 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-config\") pod \"d46266c1-f282-4531-b3f6-6485b81e28a7\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.502410 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-svc\") pod \"d46266c1-f282-4531-b3f6-6485b81e28a7\" (UID: \"d46266c1-f282-4531-b3f6-6485b81e28a7\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.502449 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-config-data\") pod \"53760a64-e8fb-4b13-9bc1-582519540946\" (UID: \"53760a64-e8fb-4b13-9bc1-582519540946\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.505146 4712 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 
06:01:15.506055 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.506165 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d46266c1-f282-4531-b3f6-6485b81e28a7-kube-api-access-nwt8k" (OuterVolumeSpecName: "kube-api-access-nwt8k") pod "d46266c1-f282-4531-b3f6-6485b81e28a7" (UID: "d46266c1-f282-4531-b3f6-6485b81e28a7"). InnerVolumeSpecName "kube-api-access-nwt8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.506931 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.522427 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-scripts" (OuterVolumeSpecName: "scripts") pod "53760a64-e8fb-4b13-9bc1-582519540946" (UID: "53760a64-e8fb-4b13-9bc1-582519540946"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.524592 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "53760a64-e8fb-4b13-9bc1-582519540946" (UID: "53760a64-e8fb-4b13-9bc1-582519540946"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.525307 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53760a64-e8fb-4b13-9bc1-582519540946-kube-api-access-v7g5b" (OuterVolumeSpecName: "kube-api-access-v7g5b") pod "53760a64-e8fb-4b13-9bc1-582519540946" (UID: "53760a64-e8fb-4b13-9bc1-582519540946"). InnerVolumeSpecName "kube-api-access-v7g5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.528819 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d46266c1-f282-4531-b3f6-6485b81e28a7" (UID: "d46266c1-f282-4531-b3f6-6485b81e28a7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.532772 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-config-data" (OuterVolumeSpecName: "config-data") pod "53760a64-e8fb-4b13-9bc1-582519540946" (UID: "53760a64-e8fb-4b13-9bc1-582519540946"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.536116 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-config" (OuterVolumeSpecName: "config") pod "d46266c1-f282-4531-b3f6-6485b81e28a7" (UID: "d46266c1-f282-4531-b3f6-6485b81e28a7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.538362 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d46266c1-f282-4531-b3f6-6485b81e28a7" (UID: "d46266c1-f282-4531-b3f6-6485b81e28a7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.544266 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d46266c1-f282-4531-b3f6-6485b81e28a7" (UID: "d46266c1-f282-4531-b3f6-6485b81e28a7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.607877 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-swift-storage-0\") pod \"dc67a839-d9bd-4294-81e7-20271771e9a0\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.608093 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-nb\") pod \"dc67a839-d9bd-4294-81e7-20271771e9a0\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.608165 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-config\") pod \"dc67a839-d9bd-4294-81e7-20271771e9a0\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.609405 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-svc\") pod \"dc67a839-d9bd-4294-81e7-20271771e9a0\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.609824 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-sb\") pod \"dc67a839-d9bd-4294-81e7-20271771e9a0\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.610027 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdjdr\" (UniqueName: \"kubernetes.io/projected/dc67a839-d9bd-4294-81e7-20271771e9a0-kube-api-access-sdjdr\") pod \"dc67a839-d9bd-4294-81e7-20271771e9a0\" (UID: \"dc67a839-d9bd-4294-81e7-20271771e9a0\") " Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.610632 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.610733 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwt8k\" (UniqueName: \"kubernetes.io/projected/d46266c1-f282-4531-b3f6-6485b81e28a7-kube-api-access-nwt8k\") on node 
\"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.610815 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7g5b\" (UniqueName: \"kubernetes.io/projected/53760a64-e8fb-4b13-9bc1-582519540946-kube-api-access-v7g5b\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.610883 4712 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.610952 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.611032 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.611109 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53760a64-e8fb-4b13-9bc1-582519540946-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.611208 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.611282 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46266c1-f282-4531-b3f6-6485b81e28a7-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.613057 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc67a839-d9bd-4294-81e7-20271771e9a0-kube-api-access-sdjdr" (OuterVolumeSpecName: "kube-api-access-sdjdr") pod "dc67a839-d9bd-4294-81e7-20271771e9a0" (UID: "dc67a839-d9bd-4294-81e7-20271771e9a0"). InnerVolumeSpecName "kube-api-access-sdjdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.626931 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "dc67a839-d9bd-4294-81e7-20271771e9a0" (UID: "dc67a839-d9bd-4294-81e7-20271771e9a0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.629437 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dc67a839-d9bd-4294-81e7-20271771e9a0" (UID: "dc67a839-d9bd-4294-81e7-20271771e9a0"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.629536 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dc67a839-d9bd-4294-81e7-20271771e9a0" (UID: "dc67a839-d9bd-4294-81e7-20271771e9a0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.630670 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dc67a839-d9bd-4294-81e7-20271771e9a0" (UID: "dc67a839-d9bd-4294-81e7-20271771e9a0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.632080 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-config" (OuterVolumeSpecName: "config") pod "dc67a839-d9bd-4294-81e7-20271771e9a0" (UID: "dc67a839-d9bd-4294-81e7-20271771e9a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.712757 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.712802 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.712847 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.712858 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.712869 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc67a839-d9bd-4294-81e7-20271771e9a0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.712881 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdjdr\" (UniqueName: \"kubernetes.io/projected/dc67a839-d9bd-4294-81e7-20271771e9a0-kube-api-access-sdjdr\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.814884 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-549896c55c-5rr59"] Jan 31 06:01:15 crc kubenswrapper[4712]: I0131 06:01:15.823428 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-549896c55c-5rr59"] Jan 31 06:01:15 crc kubenswrapper[4712]: E0131 06:01:15.849217 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.rdoproject.org/podified-master-centos9/openstack-ceilometer-central:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 06:01:15 crc kubenswrapper[4712]: E0131 06:01:15.849287 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-ceilometer-central:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 06:01:15 crc kubenswrapper[4712]: E0131 06:01:15.849458 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.rdoproject.org/podified-master-centos9/openstack-ceilometer-central:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5d6h68dhc8h5fhd9h64chd7hb4h667hb5hfch5d7h5f6h9chcfhfh5dbh5f6h6bh58h5d7h5b5h697h575h99h555h57ch67bhfch54dh68dh54cq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jfwm5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(11cd7ebc-a807-4246-92cf-48c95531e5ac): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.460445 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bc7599fc-sdkhh" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.519146 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d46266c1-f282-4531-b3f6-6485b81e28a7" path="/var/lib/kubelet/pods/d46266c1-f282-4531-b3f6-6485b81e28a7/volumes" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.551190 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bc7599fc-sdkhh"] Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.566522 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bc7599fc-sdkhh"] Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.573888 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-nrqs4"] Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.580866 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-nrqs4"] Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.652042 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-6l275"] Jan 31 06:01:16 crc kubenswrapper[4712]: E0131 06:01:16.652441 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53760a64-e8fb-4b13-9bc1-582519540946" containerName="keystone-bootstrap" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.652459 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="53760a64-e8fb-4b13-9bc1-582519540946" containerName="keystone-bootstrap" Jan 31 06:01:16 crc kubenswrapper[4712]: E0131 06:01:16.652477 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc67a839-d9bd-4294-81e7-20271771e9a0" containerName="init" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.652483 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc67a839-d9bd-4294-81e7-20271771e9a0" containerName="init" Jan 31 06:01:16 crc kubenswrapper[4712]: E0131 06:01:16.652507 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d46266c1-f282-4531-b3f6-6485b81e28a7" containerName="init" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.652514 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d46266c1-f282-4531-b3f6-6485b81e28a7" containerName="init" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.652677 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d46266c1-f282-4531-b3f6-6485b81e28a7" containerName="init" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.652696 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="53760a64-e8fb-4b13-9bc1-582519540946" containerName="keystone-bootstrap" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.652708 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc67a839-d9bd-4294-81e7-20271771e9a0" containerName="init" Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.653348 4712 util.go:30] "No sandbox for pod can be found. 
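
The paired cpu_manager / state_mem / memory_manager lines above are the kubelet's resource managers sweeping out allocations that belong to pods which no longer exist (the just-removed keystone-bootstrap and dnsmasq pod UIDs) before admitting keystone-bootstrap-6l275. The pattern is a plain sweep of an assignments map against the set of active pod UIDs; a toy version of that sweep (names and the "0-3" cpuset values are illustrative, not the kubelet's actual types):

package main

import "fmt"

// key identifies one container's assignment, as in the log lines above.
type key struct{ podUID, container string }

// removeStaleState drops every assignment whose pod UID is no longer active,
// mirroring the "RemoveStaleState: removing container" lines.
func removeStaleState(assignments map[key]string, active map[string]bool) {
	for k := range assignments {
		if !active[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(assignments, k) // deleting during range is safe in Go
		}
	}
}

func main() {
	assignments := map[key]string{
		{podUID: "53760a64-e8fb-4b13-9bc1-582519540946", container: "keystone-bootstrap"}: "0-3",
		{podUID: "dc67a839-d9bd-4294-81e7-20271771e9a0", container: "init"}:               "0-3",
	}
	active := map[string]bool{"1368cce3-9cef-4215-bb30-e9b16399e5d6": true} // the new pod
	removeStaleState(assignments, active)
}
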
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.657132 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.657162 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.657321 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kdwj2"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.657357 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.657559 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.703628 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6l275"]
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.746427 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-fernet-keys\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.746912 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-config-data\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.746963 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-combined-ca-bundle\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.747084 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qmrt\" (UniqueName: \"kubernetes.io/projected/1368cce3-9cef-4215-bb30-e9b16399e5d6-kube-api-access-5qmrt\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.747115 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-credential-keys\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.747220 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-scripts\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.848723 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-config-data\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.848768 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-combined-ca-bundle\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.848815 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qmrt\" (UniqueName: \"kubernetes.io/projected/1368cce3-9cef-4215-bb30-e9b16399e5d6-kube-api-access-5qmrt\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.848833 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-credential-keys\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.848865 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-scripts\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.849019 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-fernet-keys\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.858125 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-credential-keys\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.858163 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-config-data\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.858137 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-combined-ca-bundle\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.859010 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-fernet-keys\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.870689 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qmrt\" (UniqueName: \"kubernetes.io/projected/1368cce3-9cef-4215-bb30-e9b16399e5d6-kube-api-access-5qmrt\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.874284 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-scripts\") pod \"keystone-bootstrap-6l275\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:16 crc kubenswrapper[4712]: I0131 06:01:16.981932 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6l275"
Jan 31 06:01:18 crc kubenswrapper[4712]: I0131 06:01:18.514943 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53760a64-e8fb-4b13-9bc1-582519540946" path="/var/lib/kubelet/pods/53760a64-e8fb-4b13-9bc1-582519540946/volumes"
Jan 31 06:01:18 crc kubenswrapper[4712]: I0131 06:01:18.515692 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc67a839-d9bd-4294-81e7-20271771e9a0" path="/var/lib/kubelet/pods/dc67a839-d9bd-4294-81e7-20271771e9a0/volumes"
Jan 31 06:01:24 crc kubenswrapper[4712]: E0131 06:01:24.988951 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-cinder-api:d790bc5e0de33b4fa3f6e15acfa448e0"
Jan 31 06:01:24 crc kubenswrapper[4712]: E0131 06:01:24.989970 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-cinder-api:d790bc5e0de33b4fa3f6e15acfa448e0"
Jan 31 06:01:24 crc kubenswrapper[4712]: E0131 06:01:24.990600 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.rdoproject.org/podified-master-centos9/openstack-cinder-api:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ncsts,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-tp7gg_openstack(29a76001-83c3-470c-aede-3fe832068688): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 31 06:01:24 crc kubenswrapper[4712]: E0131 06:01:24.991818 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-tp7gg" podUID="29a76001-83c3-470c-aede-3fe832068688"
Jan 31 06:01:25 crc kubenswrapper[4712]: I0131 06:01:25.472440 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6l275"]
Jan 31 06:01:25 crc kubenswrapper[4712]: I0131 06:01:25.549389 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6l275" event={"ID":"1368cce3-9cef-4215-bb30-e9b16399e5d6","Type":"ContainerStarted","Data":"959d0bd38fcbd2947a749f2326a11b937f253746aa758bea9668ebd736a578d1"}
Jan 31 06:01:25 crc kubenswrapper[4712]: E0131 06:01:25.551312 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-cinder-api:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/cinder-db-sync-tp7gg" podUID="29a76001-83c3-470c-aede-3fe832068688"
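
Note how the cinder-db-sync pod moves from ErrImagePull (06:01:24.991818) straight to ImagePullBackOff on the next sync (06:01:25.551312): the pod worker does not retry the pull immediately but backs off exponentially per image. The ~10s initial delay doubling up to a 5m cap used in the sketch below are common kubelet defaults, not values recorded in this log, so treat the ladder as illustrative:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed defaults: 10s base, doubling, capped at 5m.
	base, limit := 10*time.Second, 5*time.Minute
	d := base
	for attempt := 1; attempt <= 7; attempt++ {
		fmt.Printf("pull attempt %d fails -> next retry in %v\n", attempt, d)
		d *= 2
		if d > limit {
			d = limit
		}
	}
}
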
pod="openstack/cinder-db-sync-tp7gg" podUID="29a76001-83c3-470c-aede-3fe832068688" Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.562613 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6l275" event={"ID":"1368cce3-9cef-4215-bb30-e9b16399e5d6","Type":"ContainerStarted","Data":"2cbb289e82cc25f40631b03206a9a0c38750f66f67eb7c4d0888f37a4f0d24e3"} Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.574015 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11cd7ebc-a807-4246-92cf-48c95531e5ac","Type":"ContainerStarted","Data":"b366e9f6ddacc2053aba166c614fda0dbc0142c45e4fb084cc4276b32e0ac01a"} Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.577093 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jc5lg" event={"ID":"830c0611-e4bf-4fae-96ed-d3a69bdff35f","Type":"ContainerStarted","Data":"d0fd24ec0a30c3ccf1fa7b5a797d57803897051bab6d1a91f87c451dde8172f5"} Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.579758 4712 generic.go:334] "Generic (PLEG): container finished" podID="d1bcfd60-2d8c-425f-9240-3f779010ae1d" containerID="b5f438820548a6120727bce52b477ad29039d087578172a3822e5a1dfaedbb1d" exitCode=0 Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.579833 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" event={"ID":"d1bcfd60-2d8c-425f-9240-3f779010ae1d","Type":"ContainerDied","Data":"b5f438820548a6120727bce52b477ad29039d087578172a3822e5a1dfaedbb1d"} Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.589748 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mh8d8" event={"ID":"66188e51-a34f-43f3-b12f-ea74a367587f","Type":"ContainerStarted","Data":"873f85d491316a17cfbe60221421a4841b9f5ba1ff919757fb7cafff5cde4de2"} Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.594459 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd","Type":"ContainerStarted","Data":"2e4f9827abbf849f6d36075e612008fdf9769053944429a0688eb4415b7e145d"} Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.596845 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3db65417-5217-43a7-9959-f2b9c0582fea","Type":"ContainerStarted","Data":"18f20af8b43fa768a4f96b7c343ce648505b66395a289dee55473013e24735d1"} Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.596871 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3db65417-5217-43a7-9959-f2b9c0582fea","Type":"ContainerStarted","Data":"3163e271e151e6cf98a63ca8a4c97f341356d004cec28186f2ad411f2a931743"} Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.597187 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="3db65417-5217-43a7-9959-f2b9c0582fea" containerName="glance-log" containerID="cri-o://3163e271e151e6cf98a63ca8a4c97f341356d004cec28186f2ad411f2a931743" gracePeriod=30 Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.597478 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="3db65417-5217-43a7-9959-f2b9c0582fea" containerName="glance-httpd" containerID="cri-o://18f20af8b43fa768a4f96b7c343ce648505b66395a289dee55473013e24735d1" gracePeriod=30 Jan 31 
06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.600292 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-6l275" podStartSLOduration=10.600228167000001 podStartE2EDuration="10.600228167s" podCreationTimestamp="2026-01-31 06:01:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:26.581960379 +0000 UTC m=+1352.675842220" watchObservedRunningTime="2026-01-31 06:01:26.600228167 +0000 UTC m=+1352.694110038" Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.607962 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-jc5lg" podStartSLOduration=3.933565488 podStartE2EDuration="31.607937286s" podCreationTimestamp="2026-01-31 06:00:55 +0000 UTC" firstStartedPulling="2026-01-31 06:00:57.304908363 +0000 UTC m=+1323.398790204" lastFinishedPulling="2026-01-31 06:01:24.979280171 +0000 UTC m=+1351.073162002" observedRunningTime="2026-01-31 06:01:26.605037615 +0000 UTC m=+1352.698919476" watchObservedRunningTime="2026-01-31 06:01:26.607937286 +0000 UTC m=+1352.701819127" Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.678612 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-mh8d8" podStartSLOduration=3.822088008 podStartE2EDuration="31.678586768s" podCreationTimestamp="2026-01-31 06:00:55 +0000 UTC" firstStartedPulling="2026-01-31 06:00:57.108154802 +0000 UTC m=+1323.202036643" lastFinishedPulling="2026-01-31 06:01:24.964653562 +0000 UTC m=+1351.058535403" observedRunningTime="2026-01-31 06:01:26.653820061 +0000 UTC m=+1352.747701892" watchObservedRunningTime="2026-01-31 06:01:26.678586768 +0000 UTC m=+1352.772468609" Jan 31 06:01:26 crc kubenswrapper[4712]: I0131 06:01:26.705915 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=29.705888627 podStartE2EDuration="29.705888627s" podCreationTimestamp="2026-01-31 06:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:26.695633566 +0000 UTC m=+1352.789515427" watchObservedRunningTime="2026-01-31 06:01:26.705888627 +0000 UTC m=+1352.799770468" Jan 31 06:01:27 crc kubenswrapper[4712]: E0131 06:01:27.281910 4712 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3db65417_5217_43a7_9959_f2b9c0582fea.slice/crio-conmon-18f20af8b43fa768a4f96b7c343ce648505b66395a289dee55473013e24735d1.scope\": RecentStats: unable to find data in memory cache]" Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.614968 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd","Type":"ContainerStarted","Data":"f682accf488d1154725736610dc588a40f7ac02618bc6d609d67fe2310e1f4d4"} Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.615052 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" containerName="glance-log" containerID="cri-o://2e4f9827abbf849f6d36075e612008fdf9769053944429a0688eb4415b7e145d" gracePeriod=30 Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.615117 4712 
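
The pod_startup_latency_tracker lines above separate pull time from start time: podStartE2EDuration is wall-clock from pod creation to observed running, while podStartSLOduration subtracts the image-pull window (firstStartedPulling to lastFinishedPulling). Redoing that arithmetic for placement-db-sync-jc5lg from the logged timestamps (the tracker samples its own clocks, so this reproduces the logged 3.933565488 only to within a few tens of nanoseconds):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the placement-db-sync-jc5lg tracker line above.
	layout := "2006-01-02 15:04:05.999999999 -0700 MST"
	first, _ := time.Parse(layout, "2026-01-31 06:00:57.304908363 +0000 UTC")
	last, _ := time.Parse(layout, "2026-01-31 06:01:24.979280171 +0000 UTC")
	e2e := 31607937286 * time.Nanosecond // podStartE2EDuration="31.607937286s"

	pull := last.Sub(first)
	fmt.Println("image-pull window:", pull)    // ~27.674371808s
	fmt.Println("E2E minus pull:   ", e2e-pull) // ~3.933565478s vs logged SLO 3.933565488
}

The keystone-bootstrap and glance pods show the zero-value "0001-01-01" pulling timestamps instead, meaning no pull was observed for them (images already present), so SLO and E2E durations coincide.
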
Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.619447 4712 generic.go:334] "Generic (PLEG): container finished" podID="3db65417-5217-43a7-9959-f2b9c0582fea" containerID="18f20af8b43fa768a4f96b7c343ce648505b66395a289dee55473013e24735d1" exitCode=0
Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.619474 4712 generic.go:334] "Generic (PLEG): container finished" podID="3db65417-5217-43a7-9959-f2b9c0582fea" containerID="3163e271e151e6cf98a63ca8a4c97f341356d004cec28186f2ad411f2a931743" exitCode=143
Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.619511 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3db65417-5217-43a7-9959-f2b9c0582fea","Type":"ContainerDied","Data":"18f20af8b43fa768a4f96b7c343ce648505b66395a289dee55473013e24735d1"}
Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.619534 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3db65417-5217-43a7-9959-f2b9c0582fea","Type":"ContainerDied","Data":"3163e271e151e6cf98a63ca8a4c97f341356d004cec28186f2ad411f2a931743"}
Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.624280 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" event={"ID":"d1bcfd60-2d8c-425f-9240-3f779010ae1d","Type":"ContainerStarted","Data":"03b9a7328e87a8a96c2da7e35bb56d7535cb7d3269ca6860f4f32fd3eed4be25"}
Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.653440 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=30.653418 podStartE2EDuration="30.653418s" podCreationTimestamp="2026-01-31 06:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:27.635153402 +0000 UTC m=+1353.729035263" watchObservedRunningTime="2026-01-31 06:01:27.653418 +0000 UTC m=+1353.747299841"
Jan 31 06:01:27 crc kubenswrapper[4712]: I0131 06:01:27.655812 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" podStartSLOduration=30.655800408 podStartE2EDuration="30.655800408s" podCreationTimestamp="2026-01-31 06:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:27.653604294 +0000 UTC m=+1353.747486135" watchObservedRunningTime="2026-01-31 06:01:27.655800408 +0000 UTC m=+1353.749682249"
Jan 31 06:01:28 crc kubenswrapper[4712]: I0131 06:01:28.088045 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d"
Jan 31 06:01:28 crc kubenswrapper[4712]: I0131 06:01:28.121894 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Jan 31 06:01:28 crc kubenswrapper[4712]: I0131 06:01:28.121948 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Jan 31 06:01:28 crc kubenswrapper[4712]: I0131 06:01:28.637806 4712 generic.go:334] "Generic (PLEG): container finished" podID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" containerID="f682accf488d1154725736610dc588a40f7ac02618bc6d609d67fe2310e1f4d4" exitCode=0
Jan 31 06:01:28 crc kubenswrapper[4712]: I0131 06:01:28.637842 4712 generic.go:334] "Generic (PLEG): container finished" podID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" containerID="2e4f9827abbf849f6d36075e612008fdf9769053944429a0688eb4415b7e145d" exitCode=143
Jan 31 06:01:28 crc kubenswrapper[4712]: I0131 06:01:28.637888 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd","Type":"ContainerDied","Data":"f682accf488d1154725736610dc588a40f7ac02618bc6d609d67fe2310e1f4d4"}
Jan 31 06:01:28 crc kubenswrapper[4712]: I0131 06:01:28.638527 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd","Type":"ContainerDied","Data":"2e4f9827abbf849f6d36075e612008fdf9769053944429a0688eb4415b7e145d"}
Jan 31 06:01:29 crc kubenswrapper[4712]: I0131 06:01:29.209750 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:29 crc kubenswrapper[4712]: I0131 06:01:29.210466 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:32 crc kubenswrapper[4712]: I0131 06:01:32.676770 4712 generic.go:334] "Generic (PLEG): container finished" podID="1368cce3-9cef-4215-bb30-e9b16399e5d6" containerID="2cbb289e82cc25f40631b03206a9a0c38750f66f67eb7c4d0888f37a4f0d24e3" exitCode=0
Jan 31 06:01:32 crc kubenswrapper[4712]: I0131 06:01:32.677507 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6l275" event={"ID":"1368cce3-9cef-4215-bb30-e9b16399e5d6","Type":"ContainerDied","Data":"2cbb289e82cc25f40631b03206a9a0c38750f66f67eb7c4d0888f37a4f0d24e3"}
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.089426 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.169144 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-664494d585-dvq5x"]
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.169945 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-664494d585-dvq5x" podUID="e102ed01-11fa-4873-8e5b-3c4102fbdabe" containerName="dnsmasq-dns" containerID="cri-o://e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681" gracePeriod=10
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.375962 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.384510 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.439814 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"3db65417-5217-43a7-9959-f2b9c0582fea\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.439870 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-combined-ca-bundle\") pod \"3db65417-5217-43a7-9959-f2b9c0582fea\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.439899 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-config-data\") pod \"3db65417-5217-43a7-9959-f2b9c0582fea\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.439941 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-scripts\") pod \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440025 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-scripts\") pod \"3db65417-5217-43a7-9959-f2b9c0582fea\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440044 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfdvl\" (UniqueName: \"kubernetes.io/projected/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-kube-api-access-jfdvl\") pod \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440063 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-httpd-run\") pod \"3db65417-5217-43a7-9959-f2b9c0582fea\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440092 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-config-data\") pod \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440135 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440160 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-logs\") pod \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440201 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hj7v2\" (UniqueName: \"kubernetes.io/projected/3db65417-5217-43a7-9959-f2b9c0582fea-kube-api-access-hj7v2\") pod \"3db65417-5217-43a7-9959-f2b9c0582fea\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440229 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-combined-ca-bundle\") pod \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440249 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-logs\") pod \"3db65417-5217-43a7-9959-f2b9c0582fea\" (UID: \"3db65417-5217-43a7-9959-f2b9c0582fea\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440270 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-httpd-run\") pod \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\" (UID: \"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.440936 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "3db65417-5217-43a7-9959-f2b9c0582fea" (UID: "3db65417-5217-43a7-9959-f2b9c0582fea"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.441077 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" (UID: "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.446524 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-scripts" (OuterVolumeSpecName: "scripts") pod "3db65417-5217-43a7-9959-f2b9c0582fea" (UID: "3db65417-5217-43a7-9959-f2b9c0582fea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.447112 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "3db65417-5217-43a7-9959-f2b9c0582fea" (UID: "3db65417-5217-43a7-9959-f2b9c0582fea"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.449820 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-logs" (OuterVolumeSpecName: "logs") pod "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" (UID: "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.450742 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-kube-api-access-jfdvl" (OuterVolumeSpecName: "kube-api-access-jfdvl") pod "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" (UID: "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd"). InnerVolumeSpecName "kube-api-access-jfdvl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.453725 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" (UID: "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.454783 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3db65417-5217-43a7-9959-f2b9c0582fea-kube-api-access-hj7v2" (OuterVolumeSpecName: "kube-api-access-hj7v2") pod "3db65417-5217-43a7-9959-f2b9c0582fea" (UID: "3db65417-5217-43a7-9959-f2b9c0582fea"). InnerVolumeSpecName "kube-api-access-hj7v2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.468563 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-logs" (OuterVolumeSpecName: "logs") pod "3db65417-5217-43a7-9959-f2b9c0582fea" (UID: "3db65417-5217-43a7-9959-f2b9c0582fea"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.474443 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-scripts" (OuterVolumeSpecName: "scripts") pod "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" (UID: "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.490230 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3db65417-5217-43a7-9959-f2b9c0582fea" (UID: "3db65417-5217-43a7-9959-f2b9c0582fea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.496751 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-config-data" (OuterVolumeSpecName: "config-data") pod "3db65417-5217-43a7-9959-f2b9c0582fea" (UID: "3db65417-5217-43a7-9959-f2b9c0582fea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.501618 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" (UID: "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.518336 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-config-data" (OuterVolumeSpecName: "config-data") pod "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" (UID: "c16b3fe9-186c-48a1-a871-4f8f3d0b39fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.545936 4712 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.545973 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.545986 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-config-data\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.545999 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-scripts\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546011 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3db65417-5217-43a7-9959-f2b9c0582fea-scripts\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546025 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfdvl\" (UniqueName: \"kubernetes.io/projected/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-kube-api-access-jfdvl\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546037 4712 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-httpd-run\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546047 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-config-data\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546066 4712 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546077 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-logs\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546088 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hj7v2\" (UniqueName: \"kubernetes.io/projected/3db65417-5217-43a7-9959-f2b9c0582fea-kube-api-access-hj7v2\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546102 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546116 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3db65417-5217-43a7-9959-f2b9c0582fea-logs\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.546127 4712 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd-httpd-run\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.572161 4712 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.584709 4712 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.647466 4712 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.647839 4712 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.647810 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-664494d585-dvq5x"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.701420 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3db65417-5217-43a7-9959-f2b9c0582fea","Type":"ContainerDied","Data":"96228b46547fdee73969d7b5678718e766d64334338d531097737e3ea435af78"}
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.701491 4712 scope.go:117] "RemoveContainer" containerID="18f20af8b43fa768a4f96b7c343ce648505b66395a289dee55473013e24735d1"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.701847 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.710301 4712 generic.go:334] "Generic (PLEG): container finished" podID="e102ed01-11fa-4873-8e5b-3c4102fbdabe" containerID="e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681" exitCode=0
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.710460 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-664494d585-dvq5x"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.710507 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-664494d585-dvq5x" event={"ID":"e102ed01-11fa-4873-8e5b-3c4102fbdabe","Type":"ContainerDied","Data":"e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681"}
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.710580 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-664494d585-dvq5x" event={"ID":"e102ed01-11fa-4873-8e5b-3c4102fbdabe","Type":"ContainerDied","Data":"30207f55c067957c296d8f68f0f4b2b97ca8098c46945c11912fb33016fb2ebf"}
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.716766 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.716963 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c16b3fe9-186c-48a1-a871-4f8f3d0b39fd","Type":"ContainerDied","Data":"eb3e6ddf93e68fc6d288188b635bfcab73b2109d14f9f6305ea7032ade15ae4a"}
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.750152 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-sb\") pod \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.750211 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-config\") pod \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.750274 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-nb\") pod \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.750311 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbbwx\" (UniqueName: \"kubernetes.io/projected/e102ed01-11fa-4873-8e5b-3c4102fbdabe-kube-api-access-zbbwx\") pod \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.750351 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-swift-storage-0\") pod \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.750441 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-svc\") pod \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\" (UID: \"e102ed01-11fa-4873-8e5b-3c4102fbdabe\") "
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.751565 4712 scope.go:117] "RemoveContainer" containerID="3163e271e151e6cf98a63ca8a4c97f341356d004cec28186f2ad411f2a931743"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.769496 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e102ed01-11fa-4873-8e5b-3c4102fbdabe-kube-api-access-zbbwx" (OuterVolumeSpecName: "kube-api-access-zbbwx") pod "e102ed01-11fa-4873-8e5b-3c4102fbdabe" (UID: "e102ed01-11fa-4873-8e5b-3c4102fbdabe"). InnerVolumeSpecName "kube-api-access-zbbwx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.779848 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.799161 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.839839 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.851826 4712 scope.go:117] "RemoveContainer" containerID="e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.859241 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbbwx\" (UniqueName: \"kubernetes.io/projected/e102ed01-11fa-4873-8e5b-3c4102fbdabe-kube-api-access-zbbwx\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.862730 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 31 06:01:33 crc kubenswrapper[4712]: E0131 06:01:33.863917 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3db65417-5217-43a7-9959-f2b9c0582fea" containerName="glance-httpd"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.863942 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3db65417-5217-43a7-9959-f2b9c0582fea" containerName="glance-httpd"
Jan 31 06:01:33 crc kubenswrapper[4712]: E0131 06:01:33.863997 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e102ed01-11fa-4873-8e5b-3c4102fbdabe" containerName="init"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864003 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e102ed01-11fa-4873-8e5b-3c4102fbdabe" containerName="init"
Jan 31 06:01:33 crc kubenswrapper[4712]: E0131 06:01:33.864016 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3db65417-5217-43a7-9959-f2b9c0582fea" containerName="glance-log"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864022 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3db65417-5217-43a7-9959-f2b9c0582fea" containerName="glance-log"
Jan 31 06:01:33 crc kubenswrapper[4712]: E0131 06:01:33.864032 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" containerName="glance-log"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864180 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" containerName="glance-log"
Jan 31 06:01:33 crc kubenswrapper[4712]: E0131 06:01:33.864208 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" containerName="glance-httpd"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864218 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" containerName="glance-httpd"
Jan 31 06:01:33 crc kubenswrapper[4712]: E0131 06:01:33.864242 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e102ed01-11fa-4873-8e5b-3c4102fbdabe" containerName="dnsmasq-dns"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864249 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e102ed01-11fa-4873-8e5b-3c4102fbdabe" containerName="dnsmasq-dns"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864771 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" containerName="glance-httpd"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864789 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="3db65417-5217-43a7-9959-f2b9c0582fea" containerName="glance-httpd"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864809 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="e102ed01-11fa-4873-8e5b-3c4102fbdabe" containerName="dnsmasq-dns"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864822 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" containerName="glance-log"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.864845 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="3db65417-5217-43a7-9959-f2b9c0582fea" containerName="glance-log"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.868835 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.884997 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.885271 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.885721 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hlkbc"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.889409 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.900399 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e102ed01-11fa-4873-8e5b-3c4102fbdabe" (UID: "e102ed01-11fa-4873-8e5b-3c4102fbdabe"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.926380 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e102ed01-11fa-4873-8e5b-3c4102fbdabe" (UID: "e102ed01-11fa-4873-8e5b-3c4102fbdabe"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.927824 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e102ed01-11fa-4873-8e5b-3c4102fbdabe" (UID: "e102ed01-11fa-4873-8e5b-3c4102fbdabe"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.935698 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-config" (OuterVolumeSpecName: "config") pod "e102ed01-11fa-4873-8e5b-3c4102fbdabe" (UID: "e102ed01-11fa-4873-8e5b-3c4102fbdabe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.955352 4712 scope.go:117] "RemoveContainer" containerID="ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.958123 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.963145 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljznw\" (UniqueName: \"kubernetes.io/projected/003300cf-c4ba-470b-aba7-3c63a426e425-kube-api-access-ljznw\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.963316 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967240 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.963355 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-config-data\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967348 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-scripts\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967389 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967452 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-logs\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0"
Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967470 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume
\"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967721 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967833 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967848 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967858 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.967868 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.977299 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.978867 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.981640 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.981645 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.986099 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e102ed01-11fa-4873-8e5b-3c4102fbdabe" (UID: "e102ed01-11fa-4873-8e5b-3c4102fbdabe"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.992716 4712 scope.go:117] "RemoveContainer" containerID="e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.993207 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:01:33 crc kubenswrapper[4712]: E0131 06:01:33.995757 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681\": container with ID starting with e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681 not found: ID does not exist" containerID="e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.995789 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681"} err="failed to get container status \"e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681\": rpc error: code = NotFound desc = could not find container \"e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681\": container with ID starting with e2bba7587fba350ce702d8d8ce6421fd3dcfc650439565b172e445a7c92cd681 not found: ID does not exist" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.995810 4712 scope.go:117] "RemoveContainer" containerID="ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396" Jan 31 06:01:33 crc kubenswrapper[4712]: E0131 06:01:33.996164 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396\": container with ID starting with ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396 not found: ID does not exist" containerID="ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.996200 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396"} err="failed to get container status \"ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396\": rpc error: code = NotFound desc = could not find container \"ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396\": container with ID starting with ba0d911b27e0ef21c7eae992be2570f70add08899649d3ff928130acdb256396 not found: ID does not exist" Jan 31 06:01:33 crc kubenswrapper[4712]: I0131 06:01:33.996213 4712 scope.go:117] "RemoveContainer" containerID="f682accf488d1154725736610dc588a40f7ac02618bc6d609d67fe2310e1f4d4" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.034723 4712 scope.go:117] "RemoveContainer" containerID="2e4f9827abbf849f6d36075e612008fdf9769053944429a0688eb4415b7e145d" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.059690 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-664494d585-dvq5x"] Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.070832 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: 
\"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.070917 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071020 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-logs\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071060 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-config-data\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071118 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljznw\" (UniqueName: \"kubernetes.io/projected/003300cf-c4ba-470b-aba7-3c63a426e425-kube-api-access-ljznw\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071185 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h562\" (UniqueName: \"kubernetes.io/projected/4e6b448d-48ff-4e07-bdea-3b5849ceb177-kube-api-access-6h562\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071236 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071333 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071409 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-scripts\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071476 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071543 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071629 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-scripts\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071766 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071829 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-logs\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071862 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.071894 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.072065 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e102ed01-11fa-4873-8e5b-3c4102fbdabe-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.072595 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-logs\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.072659 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-664494d585-dvq5x"] Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.072964 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") device mount path 
\"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.078709 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.080846 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-config-data\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.081126 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.089278 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-scripts\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.089743 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.091992 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljznw\" (UniqueName: \"kubernetes.io/projected/003300cf-c4ba-470b-aba7-3c63a426e425-kube-api-access-ljznw\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.112585 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.130566 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-6l275" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.175964 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-config-data\") pod \"1368cce3-9cef-4215-bb30-e9b16399e5d6\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176062 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-combined-ca-bundle\") pod \"1368cce3-9cef-4215-bb30-e9b16399e5d6\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176209 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qmrt\" (UniqueName: \"kubernetes.io/projected/1368cce3-9cef-4215-bb30-e9b16399e5d6-kube-api-access-5qmrt\") pod \"1368cce3-9cef-4215-bb30-e9b16399e5d6\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176302 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-scripts\") pod \"1368cce3-9cef-4215-bb30-e9b16399e5d6\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176362 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-credential-keys\") pod \"1368cce3-9cef-4215-bb30-e9b16399e5d6\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176395 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-fernet-keys\") pod \"1368cce3-9cef-4215-bb30-e9b16399e5d6\" (UID: \"1368cce3-9cef-4215-bb30-e9b16399e5d6\") " Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176667 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h562\" (UniqueName: \"kubernetes.io/projected/4e6b448d-48ff-4e07-bdea-3b5849ceb177-kube-api-access-6h562\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176709 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176793 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-scripts\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176849 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176938 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.176974 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.177074 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-logs\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.177331 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-config-data\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.185333 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-config-data\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.189378 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1368cce3-9cef-4215-bb30-e9b16399e5d6-kube-api-access-5qmrt" (OuterVolumeSpecName: "kube-api-access-5qmrt") pod "1368cce3-9cef-4215-bb30-e9b16399e5d6" (UID: "1368cce3-9cef-4215-bb30-e9b16399e5d6"). InnerVolumeSpecName "kube-api-access-5qmrt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.189722 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.189860 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.189947 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-logs\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.190893 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.192080 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1368cce3-9cef-4215-bb30-e9b16399e5d6" (UID: "1368cce3-9cef-4215-bb30-e9b16399e5d6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.194430 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-scripts\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.207014 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1368cce3-9cef-4215-bb30-e9b16399e5d6" (UID: "1368cce3-9cef-4215-bb30-e9b16399e5d6"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.212509 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-scripts" (OuterVolumeSpecName: "scripts") pod "1368cce3-9cef-4215-bb30-e9b16399e5d6" (UID: "1368cce3-9cef-4215-bb30-e9b16399e5d6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.213042 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.213402 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h562\" (UniqueName: \"kubernetes.io/projected/4e6b448d-48ff-4e07-bdea-3b5849ceb177-kube-api-access-6h562\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.220479 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-config-data" (OuterVolumeSpecName: "config-data") pod "1368cce3-9cef-4215-bb30-e9b16399e5d6" (UID: "1368cce3-9cef-4215-bb30-e9b16399e5d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.226892 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1368cce3-9cef-4215-bb30-e9b16399e5d6" (UID: "1368cce3-9cef-4215-bb30-e9b16399e5d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.227295 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.238955 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.280715 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.280746 4712 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.280759 4712 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.280768 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.280778 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1368cce3-9cef-4215-bb30-e9b16399e5d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.280790 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qmrt\" (UniqueName: \"kubernetes.io/projected/1368cce3-9cef-4215-bb30-e9b16399e5d6-kube-api-access-5qmrt\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.312898 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.521436 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3db65417-5217-43a7-9959-f2b9c0582fea" path="/var/lib/kubelet/pods/3db65417-5217-43a7-9959-f2b9c0582fea/volumes" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.522407 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c16b3fe9-186c-48a1-a871-4f8f3d0b39fd" path="/var/lib/kubelet/pods/c16b3fe9-186c-48a1-a871-4f8f3d0b39fd/volumes" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.529609 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e102ed01-11fa-4873-8e5b-3c4102fbdabe" path="/var/lib/kubelet/pods/e102ed01-11fa-4873-8e5b-3c4102fbdabe/volumes" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.745425 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6l275" event={"ID":"1368cce3-9cef-4215-bb30-e9b16399e5d6","Type":"ContainerDied","Data":"959d0bd38fcbd2947a749f2326a11b937f253746aa758bea9668ebd736a578d1"} Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.745463 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="959d0bd38fcbd2947a749f2326a11b937f253746aa758bea9668ebd736a578d1" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.745528 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6l275" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.761465 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11cd7ebc-a807-4246-92cf-48c95531e5ac","Type":"ContainerStarted","Data":"1ca9f919efb669ee992e39db3965820c70aed553d7f7c38b8a7a3a194da504ce"} Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.810768 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-76bf8584d5-c59jx"] Jan 31 06:01:34 crc kubenswrapper[4712]: E0131 06:01:34.811239 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1368cce3-9cef-4215-bb30-e9b16399e5d6" containerName="keystone-bootstrap" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.811257 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1368cce3-9cef-4215-bb30-e9b16399e5d6" containerName="keystone-bootstrap" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.811409 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1368cce3-9cef-4215-bb30-e9b16399e5d6" containerName="keystone-bootstrap" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.811969 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.817757 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-kdwj2" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.818105 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.818259 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.818468 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.819577 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.819718 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.821487 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-combined-ca-bundle\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.821562 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-public-tls-certs\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.821599 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqhkt\" (UniqueName: \"kubernetes.io/projected/623c2dc0-c837-436c-ade9-19c8a8fedfb6-kube-api-access-zqhkt\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.821618 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-scripts\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.821854 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-internal-tls-certs\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.821934 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-fernet-keys\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.821958 4712 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-config-data\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.821978 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-credential-keys\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.825610 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-76bf8584d5-c59jx"] Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.901517 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:01:34 crc kubenswrapper[4712]: W0131 06:01:34.911118 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod003300cf_c4ba_470b_aba7_3c63a426e425.slice/crio-920a2bdd98eea37a3b68bda43bda91b63c5925f1d2c1424a1855d2b7a3d2cd95 WatchSource:0}: Error finding container 920a2bdd98eea37a3b68bda43bda91b63c5925f1d2c1424a1855d2b7a3d2cd95: Status 404 returned error can't find the container with id 920a2bdd98eea37a3b68bda43bda91b63c5925f1d2c1424a1855d2b7a3d2cd95 Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.923355 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-public-tls-certs\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.923418 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqhkt\" (UniqueName: \"kubernetes.io/projected/623c2dc0-c837-436c-ade9-19c8a8fedfb6-kube-api-access-zqhkt\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.923442 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-scripts\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.923481 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-internal-tls-certs\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.923504 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-fernet-keys\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 
06:01:34.923521 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-config-data\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.923537 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-credential-keys\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.923598 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-combined-ca-bundle\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.931843 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-credential-keys\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.931897 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-internal-tls-certs\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.932026 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-config-data\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.932262 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-scripts\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.934011 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-combined-ca-bundle\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.934274 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-public-tls-certs\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.937537 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/623c2dc0-c837-436c-ade9-19c8a8fedfb6-fernet-keys\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.940754 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqhkt\" (UniqueName: \"kubernetes.io/projected/623c2dc0-c837-436c-ade9-19c8a8fedfb6-kube-api-access-zqhkt\") pod \"keystone-76bf8584d5-c59jx\" (UID: \"623c2dc0-c837-436c-ade9-19c8a8fedfb6\") " pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:34 crc kubenswrapper[4712]: I0131 06:01:34.993815 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:01:34 crc kubenswrapper[4712]: W0131 06:01:34.995002 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e6b448d_48ff_4e07_bdea_3b5849ceb177.slice/crio-59804cd2914b42851c0d71a33d8ddbb2339b5fceebb841574929b74f4b280024 WatchSource:0}: Error finding container 59804cd2914b42851c0d71a33d8ddbb2339b5fceebb841574929b74f4b280024: Status 404 returned error can't find the container with id 59804cd2914b42851c0d71a33d8ddbb2339b5fceebb841574929b74f4b280024 Jan 31 06:01:35 crc kubenswrapper[4712]: I0131 06:01:35.131356 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:35 crc kubenswrapper[4712]: I0131 06:01:35.710220 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-76bf8584d5-c59jx"] Jan 31 06:01:35 crc kubenswrapper[4712]: W0131 06:01:35.718663 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod623c2dc0_c837_436c_ade9_19c8a8fedfb6.slice/crio-cb079419b0f8efcfe9c78160171d393e243a77fcb951bbfe6450d8e7f8c3a4db WatchSource:0}: Error finding container cb079419b0f8efcfe9c78160171d393e243a77fcb951bbfe6450d8e7f8c3a4db: Status 404 returned error can't find the container with id cb079419b0f8efcfe9c78160171d393e243a77fcb951bbfe6450d8e7f8c3a4db Jan 31 06:01:35 crc kubenswrapper[4712]: I0131 06:01:35.785894 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4e6b448d-48ff-4e07-bdea-3b5849ceb177","Type":"ContainerStarted","Data":"53f0ebddd685206db7ac8cf26daaf377aff3e3de846de3284b0198d67695cdeb"} Jan 31 06:01:35 crc kubenswrapper[4712]: I0131 06:01:35.786752 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4e6b448d-48ff-4e07-bdea-3b5849ceb177","Type":"ContainerStarted","Data":"59804cd2914b42851c0d71a33d8ddbb2339b5fceebb841574929b74f4b280024"} Jan 31 06:01:35 crc kubenswrapper[4712]: I0131 06:01:35.790396 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"003300cf-c4ba-470b-aba7-3c63a426e425","Type":"ContainerStarted","Data":"be41daf527ed6a6d257b76a18943d18245f49a797a3eda78204b1ab71eeef785"} Jan 31 06:01:35 crc kubenswrapper[4712]: I0131 06:01:35.790442 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"003300cf-c4ba-470b-aba7-3c63a426e425","Type":"ContainerStarted","Data":"920a2bdd98eea37a3b68bda43bda91b63c5925f1d2c1424a1855d2b7a3d2cd95"} Jan 31 06:01:35 crc kubenswrapper[4712]: I0131 06:01:35.792587 4712 generic.go:334] "Generic (PLEG): container 
finished" podID="830c0611-e4bf-4fae-96ed-d3a69bdff35f" containerID="d0fd24ec0a30c3ccf1fa7b5a797d57803897051bab6d1a91f87c451dde8172f5" exitCode=0 Jan 31 06:01:35 crc kubenswrapper[4712]: I0131 06:01:35.792644 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jc5lg" event={"ID":"830c0611-e4bf-4fae-96ed-d3a69bdff35f","Type":"ContainerDied","Data":"d0fd24ec0a30c3ccf1fa7b5a797d57803897051bab6d1a91f87c451dde8172f5"} Jan 31 06:01:35 crc kubenswrapper[4712]: I0131 06:01:35.801786 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-76bf8584d5-c59jx" event={"ID":"623c2dc0-c837-436c-ade9-19c8a8fedfb6","Type":"ContainerStarted","Data":"cb079419b0f8efcfe9c78160171d393e243a77fcb951bbfe6450d8e7f8c3a4db"} Jan 31 06:01:36 crc kubenswrapper[4712]: I0131 06:01:36.820459 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-76bf8584d5-c59jx" event={"ID":"623c2dc0-c837-436c-ade9-19c8a8fedfb6","Type":"ContainerStarted","Data":"f35a19c223d34a0ab09d18c56ec3db13216b779871db4090d438cef0d8f2d146"} Jan 31 06:01:36 crc kubenswrapper[4712]: I0131 06:01:36.820938 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:01:36 crc kubenswrapper[4712]: I0131 06:01:36.824805 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4e6b448d-48ff-4e07-bdea-3b5849ceb177","Type":"ContainerStarted","Data":"b0d408ba7e02fa15ef6bcc304bb0bbc26769dd7735728186d94bc386ebf0abcc"} Jan 31 06:01:36 crc kubenswrapper[4712]: I0131 06:01:36.829750 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"003300cf-c4ba-470b-aba7-3c63a426e425","Type":"ContainerStarted","Data":"05accd9ce82b1c1a73b172f4530252bfe31cc572049b7be75e84fd9d091effe9"} Jan 31 06:01:36 crc kubenswrapper[4712]: I0131 06:01:36.832757 4712 generic.go:334] "Generic (PLEG): container finished" podID="66188e51-a34f-43f3-b12f-ea74a367587f" containerID="873f85d491316a17cfbe60221421a4841b9f5ba1ff919757fb7cafff5cde4de2" exitCode=0 Jan 31 06:01:36 crc kubenswrapper[4712]: I0131 06:01:36.832972 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mh8d8" event={"ID":"66188e51-a34f-43f3-b12f-ea74a367587f","Type":"ContainerDied","Data":"873f85d491316a17cfbe60221421a4841b9f5ba1ff919757fb7cafff5cde4de2"} Jan 31 06:01:36 crc kubenswrapper[4712]: I0131 06:01:36.860793 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-76bf8584d5-c59jx" podStartSLOduration=2.8607708389999997 podStartE2EDuration="2.860770839s" podCreationTimestamp="2026-01-31 06:01:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:36.852763202 +0000 UTC m=+1362.946645043" watchObservedRunningTime="2026-01-31 06:01:36.860770839 +0000 UTC m=+1362.954652680" Jan 31 06:01:36 crc kubenswrapper[4712]: I0131 06:01:36.887956 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.887936734 podStartE2EDuration="3.887936734s" podCreationTimestamp="2026-01-31 06:01:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:36.883562447 +0000 UTC m=+1362.977444288" watchObservedRunningTime="2026-01-31 
06:01:36.887936734 +0000 UTC m=+1362.981818565"
Jan 31 06:01:36 crc kubenswrapper[4712]: I0131 06:01:36.943798 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.9437720929999998 podStartE2EDuration="3.943772093s" podCreationTimestamp="2026-01-31 06:01:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:36.933764977 +0000 UTC m=+1363.027646828" watchObservedRunningTime="2026-01-31 06:01:36.943772093 +0000 UTC m=+1363.037653934"
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.178496 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jc5lg"
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.370498 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-config-data\") pod \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") "
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.370719 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7v9f\" (UniqueName: \"kubernetes.io/projected/830c0611-e4bf-4fae-96ed-d3a69bdff35f-kube-api-access-c7v9f\") pod \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") "
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.371440 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-combined-ca-bundle\") pod \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") "
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.371501 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/830c0611-e4bf-4fae-96ed-d3a69bdff35f-logs\") pod \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") "
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.371555 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-scripts\") pod \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\" (UID: \"830c0611-e4bf-4fae-96ed-d3a69bdff35f\") "
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.372931 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/830c0611-e4bf-4fae-96ed-d3a69bdff35f-logs" (OuterVolumeSpecName: "logs") pod "830c0611-e4bf-4fae-96ed-d3a69bdff35f" (UID: "830c0611-e4bf-4fae-96ed-d3a69bdff35f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.377061 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-scripts" (OuterVolumeSpecName: "scripts") pod "830c0611-e4bf-4fae-96ed-d3a69bdff35f" (UID: "830c0611-e4bf-4fae-96ed-d3a69bdff35f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.377366 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/830c0611-e4bf-4fae-96ed-d3a69bdff35f-kube-api-access-c7v9f" (OuterVolumeSpecName: "kube-api-access-c7v9f") pod "830c0611-e4bf-4fae-96ed-d3a69bdff35f" (UID: "830c0611-e4bf-4fae-96ed-d3a69bdff35f"). InnerVolumeSpecName "kube-api-access-c7v9f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.400825 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-config-data" (OuterVolumeSpecName: "config-data") pod "830c0611-e4bf-4fae-96ed-d3a69bdff35f" (UID: "830c0611-e4bf-4fae-96ed-d3a69bdff35f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.401467 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "830c0611-e4bf-4fae-96ed-d3a69bdff35f" (UID: "830c0611-e4bf-4fae-96ed-d3a69bdff35f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.477068 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-config-data\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.477131 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7v9f\" (UniqueName: \"kubernetes.io/projected/830c0611-e4bf-4fae-96ed-d3a69bdff35f-kube-api-access-c7v9f\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.477146 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.477156 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/830c0611-e4bf-4fae-96ed-d3a69bdff35f-logs\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.477186 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830c0611-e4bf-4fae-96ed-d3a69bdff35f-scripts\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.846708 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jc5lg"
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.846746 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jc5lg" event={"ID":"830c0611-e4bf-4fae-96ed-d3a69bdff35f","Type":"ContainerDied","Data":"887a866531fe91a8c287d9deec72abf36e30e0c395372b0b8e1148ce37b66283"}
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.846787 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="887a866531fe91a8c287d9deec72abf36e30e0c395372b0b8e1148ce37b66283"
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.996580 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-778dc5b584-k7lvt"]
Jan 31 06:01:37 crc kubenswrapper[4712]: E0131 06:01:37.997477 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830c0611-e4bf-4fae-96ed-d3a69bdff35f" containerName="placement-db-sync"
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.997499 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="830c0611-e4bf-4fae-96ed-d3a69bdff35f" containerName="placement-db-sync"
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.997736 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="830c0611-e4bf-4fae-96ed-d3a69bdff35f" containerName="placement-db-sync"
Jan 31 06:01:37 crc kubenswrapper[4712]: I0131 06:01:37.998936 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.002907 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.002924 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-vj7tw"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.003151 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.003314 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.003440 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.015056 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-778dc5b584-k7lvt"]
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.092253 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-scripts\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.092332 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-combined-ca-bundle\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.092362 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpg7v\" (UniqueName: \"kubernetes.io/projected/c1366fba-a1ea-415f-8b63-43648b6b16fb-kube-api-access-wpg7v\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.092383 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-public-tls-certs\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.092402 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-config-data\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.092426 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1366fba-a1ea-415f-8b63-43648b6b16fb-logs\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.092499 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-internal-tls-certs\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.192434 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mh8d8"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.195921 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-internal-tls-certs\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.196001 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-scripts\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.196091 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-combined-ca-bundle\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.196132 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpg7v\" (UniqueName: \"kubernetes.io/projected/c1366fba-a1ea-415f-8b63-43648b6b16fb-kube-api-access-wpg7v\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.196159 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-public-tls-certs\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.196220 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-config-data\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.196267 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1366fba-a1ea-415f-8b63-43648b6b16fb-logs\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.197030 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1366fba-a1ea-415f-8b63-43648b6b16fb-logs\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.200728 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-internal-tls-certs\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.201018 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-scripts\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.201377 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-combined-ca-bundle\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.202603 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-config-data\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.203427 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1366fba-a1ea-415f-8b63-43648b6b16fb-public-tls-certs\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.215547 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpg7v\" (UniqueName: \"kubernetes.io/projected/c1366fba-a1ea-415f-8b63-43648b6b16fb-kube-api-access-wpg7v\") pod \"placement-778dc5b584-k7lvt\" (UID: \"c1366fba-a1ea-415f-8b63-43648b6b16fb\") " pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.297502 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74tdc\" (UniqueName: \"kubernetes.io/projected/66188e51-a34f-43f3-b12f-ea74a367587f-kube-api-access-74tdc\") pod \"66188e51-a34f-43f3-b12f-ea74a367587f\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") "
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.297758 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-db-sync-config-data\") pod \"66188e51-a34f-43f3-b12f-ea74a367587f\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") "
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.297798 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-combined-ca-bundle\") pod \"66188e51-a34f-43f3-b12f-ea74a367587f\" (UID: \"66188e51-a34f-43f3-b12f-ea74a367587f\") "
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.301677 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "66188e51-a34f-43f3-b12f-ea74a367587f" (UID: "66188e51-a34f-43f3-b12f-ea74a367587f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.304064 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66188e51-a34f-43f3-b12f-ea74a367587f-kube-api-access-74tdc" (OuterVolumeSpecName: "kube-api-access-74tdc") pod "66188e51-a34f-43f3-b12f-ea74a367587f" (UID: "66188e51-a34f-43f3-b12f-ea74a367587f"). InnerVolumeSpecName "kube-api-access-74tdc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.331241 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.346585 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66188e51-a34f-43f3-b12f-ea74a367587f" (UID: "66188e51-a34f-43f3-b12f-ea74a367587f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.399870 4712 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.400401 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66188e51-a34f-43f3-b12f-ea74a367587f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.400417 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74tdc\" (UniqueName: \"kubernetes.io/projected/66188e51-a34f-43f3-b12f-ea74a367587f-kube-api-access-74tdc\") on node \"crc\" DevicePath \"\""
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.859809 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-tp7gg" event={"ID":"29a76001-83c3-470c-aede-3fe832068688","Type":"ContainerStarted","Data":"5c299ce006082fecc5ad33df1f9ba1d2d6e0e13f17b30fe671d78fa56f59e00f"}
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.863666 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mh8d8" event={"ID":"66188e51-a34f-43f3-b12f-ea74a367587f","Type":"ContainerDied","Data":"7d25de9a38741ac63ff2bf8e3eed6f66b230f271ec2c656da6dc1aafee4747bf"}
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.863712 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d25de9a38741ac63ff2bf8e3eed6f66b230f271ec2c656da6dc1aafee4747bf"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.863770 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mh8d8"
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.885393 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-778dc5b584-k7lvt"]
Jan 31 06:01:38 crc kubenswrapper[4712]: I0131 06:01:38.888495 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-tp7gg" podStartSLOduration=3.058673316 podStartE2EDuration="43.888475294s" podCreationTimestamp="2026-01-31 06:00:55 +0000 UTC" firstStartedPulling="2026-01-31 06:00:56.865453983 +0000 UTC m=+1322.959335824" lastFinishedPulling="2026-01-31 06:01:37.695255961 +0000 UTC m=+1363.789137802" observedRunningTime="2026-01-31 06:01:38.877847954 +0000 UTC m=+1364.971729805" watchObservedRunningTime="2026-01-31 06:01:38.888475294 +0000 UTC m=+1364.982357135"
Jan 31 06:01:38 crc kubenswrapper[4712]: W0131 06:01:38.900650 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1366fba_a1ea_415f_8b63_43648b6b16fb.slice/crio-441b1bdc5bf6fd554b05c4b5e0f2ddfda9ad25e1e490c87cb2991961e0832323 WatchSource:0}: Error finding container 441b1bdc5bf6fd554b05c4b5e0f2ddfda9ad25e1e490c87cb2991961e0832323: Status 404 returned error can't find the container with id 441b1bdc5bf6fd554b05c4b5e0f2ddfda9ad25e1e490c87cb2991961e0832323
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.198522 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5f578d49bf-ffhtj"]
Jan 31 06:01:39 crc kubenswrapper[4712]: E0131 06:01:39.200041 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66188e51-a34f-43f3-b12f-ea74a367587f" containerName="barbican-db-sync"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.200161 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="66188e51-a34f-43f3-b12f-ea74a367587f" containerName="barbican-db-sync"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.200762 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="66188e51-a34f-43f3-b12f-ea74a367587f" containerName="barbican-db-sync"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.207885 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.250714 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-kt4kg"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.251864 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.252805 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.278841 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5f578d49bf-ffhtj"]
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.299302 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-77f65dc848-97sk5"]
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.320041 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-77f65dc848-97sk5"]
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.321612 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.326270 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.329276 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872694e2-8988-499c-b05b-3597e7d4e327-combined-ca-bundle\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.329425 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/872694e2-8988-499c-b05b-3597e7d4e327-config-data\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.329570 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/872694e2-8988-499c-b05b-3597e7d4e327-logs\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.329654 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktgzm\" (UniqueName: \"kubernetes.io/projected/872694e2-8988-499c-b05b-3597e7d4e327-kube-api-access-ktgzm\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.329741 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/872694e2-8988-499c-b05b-3597e7d4e327-config-data-custom\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.338217 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54d644b8ff-2w9fj"]
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.339829 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.374725 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54d644b8ff-2w9fj"]
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.433444 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-swift-storage-0\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.433512 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-config\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.433548 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bac1193-72ba-4208-9e62-9eae63196d1c-combined-ca-bundle\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.433581 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-279dr\" (UniqueName: \"kubernetes.io/projected/8bac1193-72ba-4208-9e62-9eae63196d1c-kube-api-access-279dr\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.435946 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/872694e2-8988-499c-b05b-3597e7d4e327-logs\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.435991 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktgzm\" (UniqueName: \"kubernetes.io/projected/872694e2-8988-499c-b05b-3597e7d4e327-kube-api-access-ktgzm\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.436013 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fw47\" (UniqueName: \"kubernetes.io/projected/ccfdebd9-0d97-4015-bc8d-c88944835f62-kube-api-access-5fw47\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.436069 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/872694e2-8988-499c-b05b-3597e7d4e327-config-data-custom\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.436145 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bac1193-72ba-4208-9e62-9eae63196d1c-logs\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.437661 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-nb\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.437705 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-sb\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.437775 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-svc\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.437822 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bac1193-72ba-4208-9e62-9eae63196d1c-config-data-custom\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.437860 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bac1193-72ba-4208-9e62-9eae63196d1c-config-data\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.438099 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872694e2-8988-499c-b05b-3597e7d4e327-combined-ca-bundle\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.438143 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/872694e2-8988-499c-b05b-3597e7d4e327-config-data\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.441814 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/872694e2-8988-499c-b05b-3597e7d4e327-logs\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.441916 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l6lfz"]
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.443961 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/872694e2-8988-499c-b05b-3597e7d4e327-config-data-custom\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.445825 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/872694e2-8988-499c-b05b-3597e7d4e327-config-data\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.447468 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.452684 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6lfz"]
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.471497 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872694e2-8988-499c-b05b-3597e7d4e327-combined-ca-bundle\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.490215 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktgzm\" (UniqueName: \"kubernetes.io/projected/872694e2-8988-499c-b05b-3597e7d4e327-kube-api-access-ktgzm\") pod \"barbican-worker-5f578d49bf-ffhtj\" (UID: \"872694e2-8988-499c-b05b-3597e7d4e327\") " pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.501140 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7566766d6b-j72k9"]
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.502710 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.517780 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540039 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-catalog-content\") pod \"redhat-operators-l6lfz\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540112 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-swift-storage-0\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540199 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-config\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540228 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bac1193-72ba-4208-9e62-9eae63196d1c-combined-ca-bundle\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540261 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-279dr\" (UniqueName: \"kubernetes.io/projected/8bac1193-72ba-4208-9e62-9eae63196d1c-kube-api-access-279dr\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540286 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fw47\" (UniqueName: \"kubernetes.io/projected/ccfdebd9-0d97-4015-bc8d-c88944835f62-kube-api-access-5fw47\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540322 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dswpg\" (UniqueName: \"kubernetes.io/projected/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-kube-api-access-dswpg\") pod \"redhat-operators-l6lfz\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540354 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bac1193-72ba-4208-9e62-9eae63196d1c-logs\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540379 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-utilities\") pod \"redhat-operators-l6lfz\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540410 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-nb\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540435 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-sb\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540467 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-svc\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540488 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bac1193-72ba-4208-9e62-9eae63196d1c-config-data-custom\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.540511 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bac1193-72ba-4208-9e62-9eae63196d1c-config-data\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.541701 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-swift-storage-0\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.542244 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-config\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.543550 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-nb\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.543817 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bac1193-72ba-4208-9e62-9eae63196d1c-logs\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.544365 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-svc\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.544875 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-sb\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.548970 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8bac1193-72ba-4208-9e62-9eae63196d1c-config-data-custom\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.552097 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bac1193-72ba-4208-9e62-9eae63196d1c-combined-ca-bundle\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.557004 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7566766d6b-j72k9"]
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.562958 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-279dr\" (UniqueName: \"kubernetes.io/projected/8bac1193-72ba-4208-9e62-9eae63196d1c-kube-api-access-279dr\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.563507 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bac1193-72ba-4208-9e62-9eae63196d1c-config-data\") pod \"barbican-keystone-listener-77f65dc848-97sk5\" (UID: \"8bac1193-72ba-4208-9e62-9eae63196d1c\") " pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.567209 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fw47\" (UniqueName: \"kubernetes.io/projected/ccfdebd9-0d97-4015-bc8d-c88944835f62-kube-api-access-5fw47\") pod \"dnsmasq-dns-54d644b8ff-2w9fj\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.582372 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5f578d49bf-ffhtj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.642712 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.642787 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dswpg\" (UniqueName: \"kubernetes.io/projected/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-kube-api-access-dswpg\") pod \"redhat-operators-l6lfz\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.642834 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-utilities\") pod \"redhat-operators-l6lfz\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.642874 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4sr5\" (UniqueName: \"kubernetes.io/projected/ec5410d1-eedd-4ba5-b403-f52f970dafc1-kube-api-access-s4sr5\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.642927 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec5410d1-eedd-4ba5-b403-f52f970dafc1-logs\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.642951 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-combined-ca-bundle\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.642976 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-catalog-content\") pod \"redhat-operators-l6lfz\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.643005 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data-custom\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.643422 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-utilities\") pod \"redhat-operators-l6lfz\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.643515 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-catalog-content\") pod \"redhat-operators-l6lfz\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.650367 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-77f65dc848-97sk5"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.657836 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.666119 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dswpg\" (UniqueName: \"kubernetes.io/projected/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-kube-api-access-dswpg\") pod \"redhat-operators-l6lfz\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.744641 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4sr5\" (UniqueName: \"kubernetes.io/projected/ec5410d1-eedd-4ba5-b403-f52f970dafc1-kube-api-access-s4sr5\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.745142 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec5410d1-eedd-4ba5-b403-f52f970dafc1-logs\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.745247 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-combined-ca-bundle\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.745315 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data-custom\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.745381 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.748538 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec5410d1-eedd-4ba5-b403-f52f970dafc1-logs\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.756985 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-combined-ca-bundle\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.758209 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.767949 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data-custom\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.784500 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4sr5\" (UniqueName: \"kubernetes.io/projected/ec5410d1-eedd-4ba5-b403-f52f970dafc1-kube-api-access-s4sr5\") pod \"barbican-api-7566766d6b-j72k9\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") " pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.904710 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-778dc5b584-k7lvt" event={"ID":"c1366fba-a1ea-415f-8b63-43648b6b16fb","Type":"ContainerStarted","Data":"ab7e36043cf62cdbf8e318ca2d3c07c0c25b10a077238246b121b25f2abc795e"}
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.904756 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-778dc5b584-k7lvt" event={"ID":"c1366fba-a1ea-415f-8b63-43648b6b16fb","Type":"ContainerStarted","Data":"2de6347842dbb2145906b38916b35265529adcd3a54779b38531b89c8336f347"}
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.904765 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-778dc5b584-k7lvt" event={"ID":"c1366fba-a1ea-415f-8b63-43648b6b16fb","Type":"ContainerStarted","Data":"441b1bdc5bf6fd554b05c4b5e0f2ddfda9ad25e1e490c87cb2991961e0832323"}
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.906121 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.906152 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-778dc5b584-k7lvt"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.929932 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.944662 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:39 crc kubenswrapper[4712]: I0131 06:01:39.949423 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-778dc5b584-k7lvt" podStartSLOduration=2.949404187 podStartE2EDuration="2.949404187s" podCreationTimestamp="2026-01-31 06:01:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:39.934198944 +0000 UTC m=+1366.028080785" watchObservedRunningTime="2026-01-31 06:01:39.949404187 +0000 UTC m=+1366.043286028"
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.159634 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5f578d49bf-ffhtj"]
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.225328 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-77f65dc848-97sk5"]
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.322598 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54d644b8ff-2w9fj"]
Jan 31 06:01:40 crc kubenswrapper[4712]: W0131 06:01:40.688537 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec5410d1_eedd_4ba5_b403_f52f970dafc1.slice/crio-e6fddc7a82607a005203c79c9acd4339774a5f7847b7c836ed847a9116a92127 WatchSource:0}: Error finding container e6fddc7a82607a005203c79c9acd4339774a5f7847b7c836ed847a9116a92127: Status 404 returned error can't find the container with id e6fddc7a82607a005203c79c9acd4339774a5f7847b7c836ed847a9116a92127
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.691649 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7566766d6b-j72k9"]
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.699647 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6lfz"]
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.919937 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" event={"ID":"ccfdebd9-0d97-4015-bc8d-c88944835f62","Type":"ContainerStarted","Data":"61f0d06506088cf8d5df8c1775e74daa2b559846e5b922090eb4779a3aeb38c2"}
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.920449 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" event={"ID":"ccfdebd9-0d97-4015-bc8d-c88944835f62","Type":"ContainerStarted","Data":"c5fad597cbc4503f1ef7495104ce2d5f1e1853b5aefa5fb70fc7ad085d6320b2"}
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.923789 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-77f65dc848-97sk5" event={"ID":"8bac1193-72ba-4208-9e62-9eae63196d1c","Type":"ContainerStarted","Data":"f8e7800ccc7796cfe4bd78a95fdae0b6c193a64a1a6dc7885b83c38ebc151528"}
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.944511 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6lfz" event={"ID":"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf","Type":"ContainerStarted","Data":"5ed86ea733a19c6793a045b195e8717b6a5c812f710a1cdc483aae35db4e779b"}
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.950005 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f578d49bf-ffhtj" event={"ID":"872694e2-8988-499c-b05b-3597e7d4e327","Type":"ContainerStarted","Data":"d798e4c6f46524dab3afce6b7aa1a8340e464e671ec44e51d825d00a2e9da9cb"}
Jan 31 06:01:40 crc kubenswrapper[4712]: I0131 06:01:40.959121 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7566766d6b-j72k9" event={"ID":"ec5410d1-eedd-4ba5-b403-f52f970dafc1","Type":"ContainerStarted","Data":"e6fddc7a82607a005203c79c9acd4339774a5f7847b7c836ed847a9116a92127"}
Jan 31 06:01:41 crc kubenswrapper[4712]: I0131 06:01:41.969861 4712 generic.go:334] "Generic (PLEG): container finished" podID="ccfdebd9-0d97-4015-bc8d-c88944835f62" containerID="61f0d06506088cf8d5df8c1775e74daa2b559846e5b922090eb4779a3aeb38c2" exitCode=0
Jan 31 06:01:41 crc kubenswrapper[4712]: I0131 06:01:41.969976 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" event={"ID":"ccfdebd9-0d97-4015-bc8d-c88944835f62","Type":"ContainerDied","Data":"61f0d06506088cf8d5df8c1775e74daa2b559846e5b922090eb4779a3aeb38c2"}
Jan 31 06:01:41 crc kubenswrapper[4712]: I0131 06:01:41.974034 4712 generic.go:334] "Generic (PLEG): container finished" podID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerID="1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8" exitCode=0
Jan 31 06:01:41 crc kubenswrapper[4712]: I0131 06:01:41.974139 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6lfz" event={"ID":"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf","Type":"ContainerDied","Data":"1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8"}
Jan 31 06:01:41 crc kubenswrapper[4712]: I0131 06:01:41.976025 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7566766d6b-j72k9" event={"ID":"ec5410d1-eedd-4ba5-b403-f52f970dafc1","Type":"ContainerStarted","Data":"ef756dd7ca78401fe069870595145aa9884a84b3d85f93c48f6c46c9d35dc35d"}
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.756949 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5ff848578-hzzpm"]
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.762567 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.765606 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.765782 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.779474 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5ff848578-hzzpm"]
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.845525 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-combined-ca-bundle\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.845602 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-config-data\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.845682 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/913d1fd5-e3ce-4632-abda-a7161638d494-logs\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.845723 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-public-tls-certs\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.845800 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-internal-tls-certs\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.845882 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cbwm\" (UniqueName: \"kubernetes.io/projected/913d1fd5-e3ce-4632-abda-a7161638d494-kube-api-access-2cbwm\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.845974 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-config-data-custom\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.947360 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-internal-tls-certs\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.947507 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cbwm\" (UniqueName: \"kubernetes.io/projected/913d1fd5-e3ce-4632-abda-a7161638d494-kube-api-access-2cbwm\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.947559 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-config-data-custom\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.947641 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-combined-ca-bundle\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.947665 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-config-data\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.947726 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/913d1fd5-e3ce-4632-abda-a7161638d494-logs\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.947753 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-public-tls-certs\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.948125 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/913d1fd5-e3ce-4632-abda-a7161638d494-logs\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.954369 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-combined-ca-bundle\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.954984 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName:
\"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-public-tls-certs\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm" Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.957343 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-internal-tls-certs\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm" Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.963214 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-config-data-custom\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm" Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.964698 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cbwm\" (UniqueName: \"kubernetes.io/projected/913d1fd5-e3ce-4632-abda-a7161638d494-kube-api-access-2cbwm\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm" Jan 31 06:01:42 crc kubenswrapper[4712]: I0131 06:01:42.965827 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/913d1fd5-e3ce-4632-abda-a7161638d494-config-data\") pod \"barbican-api-5ff848578-hzzpm\" (UID: \"913d1fd5-e3ce-4632-abda-a7161638d494\") " pod="openstack/barbican-api-5ff848578-hzzpm" Jan 31 06:01:43 crc kubenswrapper[4712]: I0131 06:01:43.086253 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5ff848578-hzzpm" Jan 31 06:01:44 crc kubenswrapper[4712]: I0131 06:01:44.227892 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:44 crc kubenswrapper[4712]: I0131 06:01:44.227953 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:44 crc kubenswrapper[4712]: I0131 06:01:44.260678 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:44 crc kubenswrapper[4712]: I0131 06:01:44.294998 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:44 crc kubenswrapper[4712]: I0131 06:01:44.314257 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 31 06:01:44 crc kubenswrapper[4712]: I0131 06:01:44.314297 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 31 06:01:44 crc kubenswrapper[4712]: I0131 06:01:44.356755 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 31 06:01:44 crc kubenswrapper[4712]: I0131 06:01:44.373888 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 31 06:01:45 crc kubenswrapper[4712]: I0131 06:01:45.010497 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:45 crc kubenswrapper[4712]: I0131 06:01:45.010548 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 31 06:01:45 crc kubenswrapper[4712]: I0131 06:01:45.010568 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:45 crc kubenswrapper[4712]: I0131 06:01:45.010580 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 31 06:01:46 crc kubenswrapper[4712]: I0131 06:01:46.910287 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 31 06:01:46 crc kubenswrapper[4712]: I0131 06:01:46.914566 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:46 crc kubenswrapper[4712]: I0131 06:01:46.915139 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 31 06:01:46 crc kubenswrapper[4712]: I0131 06:01:46.949747 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 31 06:01:49 crc kubenswrapper[4712]: I0131 06:01:49.818414 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5ff848578-hzzpm"] Jan 31 06:01:49 crc kubenswrapper[4712]: W0131 06:01:49.828660 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod913d1fd5_e3ce_4632_abda_a7161638d494.slice/crio-1fcfcd8a4000be5dd38584bb8d40b0dda76ba8f3b793a822a2928b39b2277856 WatchSource:0}: Error finding container 
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.055025 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-77f65dc848-97sk5" event={"ID":"8bac1193-72ba-4208-9e62-9eae63196d1c","Type":"ContainerStarted","Data":"a401a7f7573e230dfab1d81f45686ef0fcbd28f60f1ec509d83eaa23880e57d7"}
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.057601 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11cd7ebc-a807-4246-92cf-48c95531e5ac","Type":"ContainerStarted","Data":"68343edf63ea42e58ed8625533c1312e310cb7b88b7ec1905b76e6a5cdfacf0f"}
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.059852 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6lfz" event={"ID":"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf","Type":"ContainerStarted","Data":"84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc"}
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.062405 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f578d49bf-ffhtj" event={"ID":"872694e2-8988-499c-b05b-3597e7d4e327","Type":"ContainerStarted","Data":"ea508c6b8beade303fc91dd55378ffe4cc375ca70ba8331fb42e3c3b146027a4"}
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.065618 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7566766d6b-j72k9" event={"ID":"ec5410d1-eedd-4ba5-b403-f52f970dafc1","Type":"ContainerStarted","Data":"267293ec3f126bffbe399efe83e867bf3beeef11fcc3f369b3c6307cbede2a6a"}
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.065667 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.066530 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.068590 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" event={"ID":"ccfdebd9-0d97-4015-bc8d-c88944835f62","Type":"ContainerStarted","Data":"ea4754ea4860879bcd24269802a1feed811f143a90297096962b670877941963"}
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.068711 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj"
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.069784 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5ff848578-hzzpm" event={"ID":"913d1fd5-e3ce-4632-abda-a7161638d494","Type":"ContainerStarted","Data":"1fcfcd8a4000be5dd38584bb8d40b0dda76ba8f3b793a822a2928b39b2277856"}
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.118346 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7566766d6b-j72k9" podStartSLOduration=11.118328202 podStartE2EDuration="11.118328202s" podCreationTimestamp="2026-01-31 06:01:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:50.117644075 +0000 UTC m=+1376.211525916" watchObservedRunningTime="2026-01-31 06:01:50.118328202 +0000 UTC m=+1376.212210043"
Jan 31 06:01:50 crc kubenswrapper[4712]: I0131 06:01:50.145563 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" podStartSLOduration=11.145545389 podStartE2EDuration="11.145545389s" podCreationTimestamp="2026-01-31 06:01:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:50.140670769 +0000 UTC m=+1376.234552600" watchObservedRunningTime="2026-01-31 06:01:50.145545389 +0000 UTC m=+1376.239427220"
Jan 31 06:01:50 crc kubenswrapper[4712]: E0131 06:01:50.329819 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac"
Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.083743 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-77f65dc848-97sk5" event={"ID":"8bac1193-72ba-4208-9e62-9eae63196d1c","Type":"ContainerStarted","Data":"bbd4d32b07e0457291afb6766b6785d3658e4113a63d0bb6a33bd857d583b84b"}
Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.086685 4712 generic.go:334] "Generic (PLEG): container finished" podID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerID="84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc" exitCode=0
Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.086979 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6lfz" event={"ID":"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf","Type":"ContainerDied","Data":"84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc"}
Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.090101 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f578d49bf-ffhtj" event={"ID":"872694e2-8988-499c-b05b-3597e7d4e327","Type":"ContainerStarted","Data":"4907c708a7e8fca242131f05cf81bf5b32aa0d880152a5877742b7f199100b54"}
Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.094216 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5ff848578-hzzpm" event={"ID":"913d1fd5-e3ce-4632-abda-a7161638d494","Type":"ContainerStarted","Data":"a829701ab6599002b3f979e543d313c1b37ca0eac147efcf0ce932f07e3ae095"}
Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.094300 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="sg-core" containerID="cri-o://1ca9f919efb669ee992e39db3965820c70aed553d7f7c38b8a7a3a194da504ce" gracePeriod=30
Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.094307 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="proxy-httpd" containerID="cri-o://68343edf63ea42e58ed8625533c1312e310cb7b88b7ec1905b76e6a5cdfacf0f" gracePeriod=30
Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.095927 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.094084 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="ceilometer-notification-agent" containerID="cri-o://b366e9f6ddacc2053aba166c614fda0dbc0142c45e4fb084cc4276b32e0ac01a" gracePeriod=30
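[Editor's note] After the ErrImagePull failure above, ceilometer-0 is torn down: each container is killed with gracePeriod=30, i.e. SIGTERM first, SIGKILL only if the container outlives the grace period. The exit codes in the surrounding entries fit that model (0 for clean shutdowns, 2 for sg-core, and later 143 = 128+SIGTERM for barbican-api-log). A small process-level illustration of the escalation, assuming a Unix environment with a local sleep binary (the kubelet itself does this through the CRI StopContainer call, not raw signals):

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    // killWithGrace mimics the two-step termination behind the
    // "Killing container with a grace period" entries: SIGTERM first,
    // then SIGKILL if the process is still alive when the grace period
    // expires.
    func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()

        _ = cmd.Process.Signal(syscall.SIGTERM)
        select {
        case <-done:
            fmt.Println("exited within grace period")
        case <-time.After(grace):
            _ = cmd.Process.Kill() // escalate to SIGKILL
            <-done
            fmt.Println("killed after grace period expired")
        }
    }

    func main() {
        cmd := exec.Command("sleep", "300") // stand-in for a container process
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        killWithGrace(cmd, 2*time.Second) // the log uses 30s here, 10s for dnsmasq below
    }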
containerID="cri-o://b366e9f6ddacc2053aba166c614fda0dbc0142c45e4fb084cc4276b32e0ac01a" gracePeriod=30 Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.111773 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-77f65dc848-97sk5" podStartSLOduration=3.041171881 podStartE2EDuration="12.111745729s" podCreationTimestamp="2026-01-31 06:01:39 +0000 UTC" firstStartedPulling="2026-01-31 06:01:40.283676178 +0000 UTC m=+1366.377558019" lastFinishedPulling="2026-01-31 06:01:49.354250026 +0000 UTC m=+1375.448131867" observedRunningTime="2026-01-31 06:01:51.110326494 +0000 UTC m=+1377.204208375" watchObservedRunningTime="2026-01-31 06:01:51.111745729 +0000 UTC m=+1377.205627610" Jan 31 06:01:51 crc kubenswrapper[4712]: I0131 06:01:51.209928 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5f578d49bf-ffhtj" podStartSLOduration=3.097113282 podStartE2EDuration="12.209914385s" podCreationTimestamp="2026-01-31 06:01:39 +0000 UTC" firstStartedPulling="2026-01-31 06:01:40.173020176 +0000 UTC m=+1366.266902017" lastFinishedPulling="2026-01-31 06:01:49.285821279 +0000 UTC m=+1375.379703120" observedRunningTime="2026-01-31 06:01:51.205123588 +0000 UTC m=+1377.299005429" watchObservedRunningTime="2026-01-31 06:01:51.209914385 +0000 UTC m=+1377.303796226" Jan 31 06:01:52 crc kubenswrapper[4712]: I0131 06:01:52.142431 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5ff848578-hzzpm" event={"ID":"913d1fd5-e3ce-4632-abda-a7161638d494","Type":"ContainerStarted","Data":"616d3cfd1b5072ba6bd661bd4a432a40c662a3464c282b10b948fad6406c3c85"} Jan 31 06:01:52 crc kubenswrapper[4712]: I0131 06:01:52.143769 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5ff848578-hzzpm" Jan 31 06:01:52 crc kubenswrapper[4712]: I0131 06:01:52.143958 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5ff848578-hzzpm" Jan 31 06:01:52 crc kubenswrapper[4712]: I0131 06:01:52.183411 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5ff848578-hzzpm" podStartSLOduration=10.183388613 podStartE2EDuration="10.183388613s" podCreationTimestamp="2026-01-31 06:01:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:01:52.160948703 +0000 UTC m=+1378.254830544" watchObservedRunningTime="2026-01-31 06:01:52.183388613 +0000 UTC m=+1378.277270454" Jan 31 06:01:53 crc kubenswrapper[4712]: I0131 06:01:53.158919 4712 generic.go:334] "Generic (PLEG): container finished" podID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerID="68343edf63ea42e58ed8625533c1312e310cb7b88b7ec1905b76e6a5cdfacf0f" exitCode=0 Jan 31 06:01:53 crc kubenswrapper[4712]: I0131 06:01:53.158965 4712 generic.go:334] "Generic (PLEG): container finished" podID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerID="1ca9f919efb669ee992e39db3965820c70aed553d7f7c38b8a7a3a194da504ce" exitCode=2 Jan 31 06:01:53 crc kubenswrapper[4712]: I0131 06:01:53.159017 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11cd7ebc-a807-4246-92cf-48c95531e5ac","Type":"ContainerDied","Data":"68343edf63ea42e58ed8625533c1312e310cb7b88b7ec1905b76e6a5cdfacf0f"} Jan 31 06:01:53 crc kubenswrapper[4712]: I0131 06:01:53.159067 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"11cd7ebc-a807-4246-92cf-48c95531e5ac","Type":"ContainerDied","Data":"1ca9f919efb669ee992e39db3965820c70aed553d7f7c38b8a7a3a194da504ce"} Jan 31 06:01:54 crc kubenswrapper[4712]: I0131 06:01:54.663512 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" Jan 31 06:01:54 crc kubenswrapper[4712]: I0131 06:01:54.733635 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6497bf7d4f-95d9d"] Jan 31 06:01:54 crc kubenswrapper[4712]: I0131 06:01:54.733906 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" podUID="d1bcfd60-2d8c-425f-9240-3f779010ae1d" containerName="dnsmasq-dns" containerID="cri-o://03b9a7328e87a8a96c2da7e35bb56d7535cb7d3269ca6860f4f32fd3eed4be25" gracePeriod=10 Jan 31 06:01:55 crc kubenswrapper[4712]: I0131 06:01:55.109436 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7566766d6b-j72k9" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 31 06:01:55 crc kubenswrapper[4712]: I0131 06:01:55.181561 4712 generic.go:334] "Generic (PLEG): container finished" podID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerID="b366e9f6ddacc2053aba166c614fda0dbc0142c45e4fb084cc4276b32e0ac01a" exitCode=0 Jan 31 06:01:55 crc kubenswrapper[4712]: I0131 06:01:55.181652 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11cd7ebc-a807-4246-92cf-48c95531e5ac","Type":"ContainerDied","Data":"b366e9f6ddacc2053aba166c614fda0dbc0142c45e4fb084cc4276b32e0ac01a"} Jan 31 06:01:55 crc kubenswrapper[4712]: I0131 06:01:55.704096 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-7566766d6b-j72k9" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.152734 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.215189 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11cd7ebc-a807-4246-92cf-48c95531e5ac","Type":"ContainerDied","Data":"229e0b58a667314ec69376653f0e8b9a6564289e3b2794dad743aae950cf7092"} Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.215236 4712 scope.go:117] "RemoveContainer" containerID="68343edf63ea42e58ed8625533c1312e310cb7b88b7ec1905b76e6a5cdfacf0f" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.215368 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.219291 4712 generic.go:334] "Generic (PLEG): container finished" podID="d1bcfd60-2d8c-425f-9240-3f779010ae1d" containerID="03b9a7328e87a8a96c2da7e35bb56d7535cb7d3269ca6860f4f32fd3eed4be25" exitCode=0 Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.219340 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" event={"ID":"d1bcfd60-2d8c-425f-9240-3f779010ae1d","Type":"ContainerDied","Data":"03b9a7328e87a8a96c2da7e35bb56d7535cb7d3269ca6860f4f32fd3eed4be25"} Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.224715 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6lfz" event={"ID":"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf","Type":"ContainerStarted","Data":"85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3"} Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.250556 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l6lfz" podStartSLOduration=6.106278814 podStartE2EDuration="17.250537453s" podCreationTimestamp="2026-01-31 06:01:39 +0000 UTC" firstStartedPulling="2026-01-31 06:01:44.539284998 +0000 UTC m=+1370.633166829" lastFinishedPulling="2026-01-31 06:01:55.683543617 +0000 UTC m=+1381.777425468" observedRunningTime="2026-01-31 06:01:56.249351964 +0000 UTC m=+1382.343233805" watchObservedRunningTime="2026-01-31 06:01:56.250537453 +0000 UTC m=+1382.344419294" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.269660 4712 scope.go:117] "RemoveContainer" containerID="1ca9f919efb669ee992e39db3965820c70aed553d7f7c38b8a7a3a194da504ce" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.292359 4712 scope.go:117] "RemoveContainer" containerID="b366e9f6ddacc2053aba166c614fda0dbc0142c45e4fb084cc4276b32e0ac01a" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.318992 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-scripts\") pod \"11cd7ebc-a807-4246-92cf-48c95531e5ac\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.319045 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-combined-ca-bundle\") pod \"11cd7ebc-a807-4246-92cf-48c95531e5ac\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.319076 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-log-httpd\") pod \"11cd7ebc-a807-4246-92cf-48c95531e5ac\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.319141 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-sg-core-conf-yaml\") pod \"11cd7ebc-a807-4246-92cf-48c95531e5ac\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.319333 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-config-data\") pod \"11cd7ebc-a807-4246-92cf-48c95531e5ac\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.319435 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfwm5\" (UniqueName: \"kubernetes.io/projected/11cd7ebc-a807-4246-92cf-48c95531e5ac-kube-api-access-jfwm5\") pod \"11cd7ebc-a807-4246-92cf-48c95531e5ac\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.319531 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-run-httpd\") pod \"11cd7ebc-a807-4246-92cf-48c95531e5ac\" (UID: \"11cd7ebc-a807-4246-92cf-48c95531e5ac\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.327200 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "11cd7ebc-a807-4246-92cf-48c95531e5ac" (UID: "11cd7ebc-a807-4246-92cf-48c95531e5ac"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.327630 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "11cd7ebc-a807-4246-92cf-48c95531e5ac" (UID: "11cd7ebc-a807-4246-92cf-48c95531e5ac"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.329530 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-scripts" (OuterVolumeSpecName: "scripts") pod "11cd7ebc-a807-4246-92cf-48c95531e5ac" (UID: "11cd7ebc-a807-4246-92cf-48c95531e5ac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.330409 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11cd7ebc-a807-4246-92cf-48c95531e5ac-kube-api-access-jfwm5" (OuterVolumeSpecName: "kube-api-access-jfwm5") pod "11cd7ebc-a807-4246-92cf-48c95531e5ac" (UID: "11cd7ebc-a807-4246-92cf-48c95531e5ac"). InnerVolumeSpecName "kube-api-access-jfwm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.377383 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11cd7ebc-a807-4246-92cf-48c95531e5ac" (UID: "11cd7ebc-a807-4246-92cf-48c95531e5ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.385354 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "11cd7ebc-a807-4246-92cf-48c95531e5ac" (UID: "11cd7ebc-a807-4246-92cf-48c95531e5ac"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.398689 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.422122 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.422197 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.422213 4712 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.422248 4712 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.422261 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfwm5\" (UniqueName: \"kubernetes.io/projected/11cd7ebc-a807-4246-92cf-48c95531e5ac-kube-api-access-jfwm5\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.422272 4712 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11cd7ebc-a807-4246-92cf-48c95531e5ac-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.423982 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-config-data" (OuterVolumeSpecName: "config-data") pod "11cd7ebc-a807-4246-92cf-48c95531e5ac" (UID: "11cd7ebc-a807-4246-92cf-48c95531e5ac"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.477783 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7566766d6b-j72k9" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.525482 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-config\") pod \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.525562 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65g4t\" (UniqueName: \"kubernetes.io/projected/d1bcfd60-2d8c-425f-9240-3f779010ae1d-kube-api-access-65g4t\") pod \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.525683 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-swift-storage-0\") pod \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.525775 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-sb\") pod \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.525847 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-nb\") pod \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.525878 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-svc\") pod \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\" (UID: \"d1bcfd60-2d8c-425f-9240-3f779010ae1d\") " Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.526498 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11cd7ebc-a807-4246-92cf-48c95531e5ac-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.529264 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1bcfd60-2d8c-425f-9240-3f779010ae1d-kube-api-access-65g4t" (OuterVolumeSpecName: "kube-api-access-65g4t") pod "d1bcfd60-2d8c-425f-9240-3f779010ae1d" (UID: "d1bcfd60-2d8c-425f-9240-3f779010ae1d"). InnerVolumeSpecName "kube-api-access-65g4t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.588795 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.592554 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d1bcfd60-2d8c-425f-9240-3f779010ae1d" (UID: "d1bcfd60-2d8c-425f-9240-3f779010ae1d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.593543 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d1bcfd60-2d8c-425f-9240-3f779010ae1d" (UID: "d1bcfd60-2d8c-425f-9240-3f779010ae1d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.608660 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.615402 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d1bcfd60-2d8c-425f-9240-3f779010ae1d" (UID: "d1bcfd60-2d8c-425f-9240-3f779010ae1d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.627235 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:01:56 crc kubenswrapper[4712]: E0131 06:01:56.627762 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="proxy-httpd" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.627782 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="proxy-httpd" Jan 31 06:01:56 crc kubenswrapper[4712]: E0131 06:01:56.627833 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="sg-core" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.627841 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="sg-core" Jan 31 06:01:56 crc kubenswrapper[4712]: E0131 06:01:56.627850 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="ceilometer-notification-agent" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.627857 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="ceilometer-notification-agent" Jan 31 06:01:56 crc kubenswrapper[4712]: E0131 06:01:56.627865 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1bcfd60-2d8c-425f-9240-3f779010ae1d" containerName="dnsmasq-dns" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.627872 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1bcfd60-2d8c-425f-9240-3f779010ae1d" containerName="dnsmasq-dns" Jan 31 06:01:56 crc kubenswrapper[4712]: E0131 06:01:56.627886 4712 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d1bcfd60-2d8c-425f-9240-3f779010ae1d" containerName="init" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.627911 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1bcfd60-2d8c-425f-9240-3f779010ae1d" containerName="init" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.628165 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="ceilometer-notification-agent" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.628210 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="proxy-httpd" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.628222 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" containerName="sg-core" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.628256 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1bcfd60-2d8c-425f-9240-3f779010ae1d" containerName="dnsmasq-dns" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.628829 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.628844 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.628855 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.628868 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65g4t\" (UniqueName: \"kubernetes.io/projected/d1bcfd60-2d8c-425f-9240-3f779010ae1d-kube-api-access-65g4t\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.630553 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.635654 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.635848 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d1bcfd60-2d8c-425f-9240-3f779010ae1d" (UID: "d1bcfd60-2d8c-425f-9240-3f779010ae1d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.636778 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.639620 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-config" (OuterVolumeSpecName: "config") pod "d1bcfd60-2d8c-425f-9240-3f779010ae1d" (UID: "d1bcfd60-2d8c-425f-9240-3f779010ae1d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.648153 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.728881 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7566766d6b-j72k9" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.734144 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.734221 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.734248 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-config-data\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.734341 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l6t7\" (UniqueName: \"kubernetes.io/projected/48089928-0ffc-4d25-adf9-57f7874477c2-kube-api-access-6l6t7\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.734393 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-scripts\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.734466 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-log-httpd\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.734732 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-run-httpd\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.734920 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.734941 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d1bcfd60-2d8c-425f-9240-3f779010ae1d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:01:56 crc 
kubenswrapper[4712]: I0131 06:01:56.837202 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-scripts\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.837902 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-log-httpd\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.838048 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-run-httpd\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.838121 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.838225 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.838293 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-config-data\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.838368 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l6t7\" (UniqueName: \"kubernetes.io/projected/48089928-0ffc-4d25-adf9-57f7874477c2-kube-api-access-6l6t7\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.839237 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-log-httpd\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.840867 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-run-httpd\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.842332 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-scripts\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.844937 4712 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.845587 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.847125 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-config-data\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.874132 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l6t7\" (UniqueName: \"kubernetes.io/projected/48089928-0ffc-4d25-adf9-57f7874477c2-kube-api-access-6l6t7\") pod \"ceilometer-0\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " pod="openstack/ceilometer-0" Jan 31 06:01:56 crc kubenswrapper[4712]: I0131 06:01:56.951674 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:01:57 crc kubenswrapper[4712]: I0131 06:01:57.236715 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d" event={"ID":"d1bcfd60-2d8c-425f-9240-3f779010ae1d","Type":"ContainerDied","Data":"7fa002e37534591c0133057e945060ad29a4557ee6157c8b71afc4259d7d32dd"} Jan 31 06:01:57 crc kubenswrapper[4712]: I0131 06:01:57.237045 4712 scope.go:117] "RemoveContainer" containerID="03b9a7328e87a8a96c2da7e35bb56d7535cb7d3269ca6860f4f32fd3eed4be25" Jan 31 06:01:57 crc kubenswrapper[4712]: I0131 06:01:57.236775 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:01:57 crc kubenswrapper[4712]: I0131 06:01:57.236775 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6497bf7d4f-95d9d"
Jan 31 06:01:57 crc kubenswrapper[4712]: I0131 06:01:57.269940 4712 scope.go:117] "RemoveContainer" containerID="b5f438820548a6120727bce52b477ad29039d087578172a3822e5a1dfaedbb1d"
Jan 31 06:01:57 crc kubenswrapper[4712]: I0131 06:01:57.283214 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6497bf7d4f-95d9d"]
Jan 31 06:01:57 crc kubenswrapper[4712]: I0131 06:01:57.290765 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6497bf7d4f-95d9d"]
Jan 31 06:01:57 crc kubenswrapper[4712]: I0131 06:01:57.396357 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 31 06:01:57 crc kubenswrapper[4712]: W0131 06:01:57.401576 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48089928_0ffc_4d25_adf9_57f7874477c2.slice/crio-6a86fb333d3686c641ae305e44e580b8eba500ad1b46cd8f6a115e745657c580 WatchSource:0}: Error finding container 6a86fb333d3686c641ae305e44e580b8eba500ad1b46cd8f6a115e745657c580: Status 404 returned error can't find the container with id 6a86fb333d3686c641ae305e44e580b8eba500ad1b46cd8f6a115e745657c580
Jan 31 06:01:58 crc kubenswrapper[4712]: I0131 06:01:58.246728 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerStarted","Data":"04713538ecd0d6f2f98417d580ccc209c6f73c5a5eff465a0ecb7dc62c7009c7"}
Jan 31 06:01:58 crc kubenswrapper[4712]: I0131 06:01:58.247235 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerStarted","Data":"6a86fb333d3686c641ae305e44e580b8eba500ad1b46cd8f6a115e745657c580"}
Jan 31 06:01:58 crc kubenswrapper[4712]: I0131 06:01:58.516034 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11cd7ebc-a807-4246-92cf-48c95531e5ac" path="/var/lib/kubelet/pods/11cd7ebc-a807-4246-92cf-48c95531e5ac/volumes"
Jan 31 06:01:58 crc kubenswrapper[4712]: I0131 06:01:58.516854 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1bcfd60-2d8c-425f-9240-3f779010ae1d" path="/var/lib/kubelet/pods/d1bcfd60-2d8c-425f-9240-3f779010ae1d/volumes"
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.259932 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerStarted","Data":"b378a9997bde935cc0cd9c6355e147ced72ac937b80d86f1ca07d3fa7274b7c4"}
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.261906 4712 generic.go:334] "Generic (PLEG): container finished" podID="29a76001-83c3-470c-aede-3fe832068688" containerID="5c299ce006082fecc5ad33df1f9ba1d2d6e0e13f17b30fe671d78fa56f59e00f" exitCode=0
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.261993 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-tp7gg" event={"ID":"29a76001-83c3-470c-aede-3fe832068688","Type":"ContainerDied","Data":"5c299ce006082fecc5ad33df1f9ba1d2d6e0e13f17b30fe671d78fa56f59e00f"}
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.491527 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.643039 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5ff848578-hzzpm"
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.726539 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7566766d6b-j72k9"]
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.726799 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7566766d6b-j72k9" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api-log" containerID="cri-o://ef756dd7ca78401fe069870595145aa9884a84b3d85f93c48f6c46c9d35dc35d" gracePeriod=30
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.727285 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7566766d6b-j72k9" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api" containerID="cri-o://267293ec3f126bffbe399efe83e867bf3beeef11fcc3f369b3c6307cbede2a6a" gracePeriod=30
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.931002 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:01:59 crc kubenswrapper[4712]: I0131 06:01:59.931053 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l6lfz"
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.275628 4712 generic.go:334] "Generic (PLEG): container finished" podID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerID="ef756dd7ca78401fe069870595145aa9884a84b3d85f93c48f6c46c9d35dc35d" exitCode=143
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.277310 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7566766d6b-j72k9" event={"ID":"ec5410d1-eedd-4ba5-b403-f52f970dafc1","Type":"ContainerDied","Data":"ef756dd7ca78401fe069870595145aa9884a84b3d85f93c48f6c46c9d35dc35d"}
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.654870 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-tp7gg"
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.821682 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncsts\" (UniqueName: \"kubernetes.io/projected/29a76001-83c3-470c-aede-3fe832068688-kube-api-access-ncsts\") pod \"29a76001-83c3-470c-aede-3fe832068688\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") "
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.821761 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-scripts\") pod \"29a76001-83c3-470c-aede-3fe832068688\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") "
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.821972 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-config-data\") pod \"29a76001-83c3-470c-aede-3fe832068688\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") "
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.822046 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-db-sync-config-data\") pod \"29a76001-83c3-470c-aede-3fe832068688\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") "
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.822101 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29a76001-83c3-470c-aede-3fe832068688-etc-machine-id\") pod \"29a76001-83c3-470c-aede-3fe832068688\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") "
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.822130 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-combined-ca-bundle\") pod \"29a76001-83c3-470c-aede-3fe832068688\" (UID: \"29a76001-83c3-470c-aede-3fe832068688\") "
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.822340 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29a76001-83c3-470c-aede-3fe832068688-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "29a76001-83c3-470c-aede-3fe832068688" (UID: "29a76001-83c3-470c-aede-3fe832068688"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.822701 4712 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29a76001-83c3-470c-aede-3fe832068688-etc-machine-id\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.828395 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29a76001-83c3-470c-aede-3fe832068688-kube-api-access-ncsts" (OuterVolumeSpecName: "kube-api-access-ncsts") pod "29a76001-83c3-470c-aede-3fe832068688" (UID: "29a76001-83c3-470c-aede-3fe832068688"). InnerVolumeSpecName "kube-api-access-ncsts". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.828787 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-scripts" (OuterVolumeSpecName: "scripts") pod "29a76001-83c3-470c-aede-3fe832068688" (UID: "29a76001-83c3-470c-aede-3fe832068688"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.828953 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "29a76001-83c3-470c-aede-3fe832068688" (UID: "29a76001-83c3-470c-aede-3fe832068688"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.868339 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29a76001-83c3-470c-aede-3fe832068688" (UID: "29a76001-83c3-470c-aede-3fe832068688"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.898702 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-config-data" (OuterVolumeSpecName: "config-data") pod "29a76001-83c3-470c-aede-3fe832068688" (UID: "29a76001-83c3-470c-aede-3fe832068688"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.925201 4712 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.925245 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.925260 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncsts\" (UniqueName: \"kubernetes.io/projected/29a76001-83c3-470c-aede-3fe832068688-kube-api-access-ncsts\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.925275 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-scripts\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.925290 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a76001-83c3-470c-aede-3fe832068688-config-data\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:00 crc kubenswrapper[4712]: I0131 06:02:00.994482 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l6lfz" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerName="registry-server" probeResult="failure" output=<
Jan 31 06:02:00 crc kubenswrapper[4712]: timeout: failed to connect service ":50051" within 1s
Jan 31 06:02:00 crc kubenswrapper[4712]: >
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.287806 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-tp7gg" event={"ID":"29a76001-83c3-470c-aede-3fe832068688","Type":"ContainerDied","Data":"8004bd0dce755f0af52ea09b7108d5e7a6f200c44972d4a035287b5483b92e91"}
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.288914 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8004bd0dce755f0af52ea09b7108d5e7a6f200c44972d4a035287b5483b92e91"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.287838 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-tp7gg"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.290533 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerStarted","Data":"e9ce21874c22ee4c3a330ef845d7e62f675137fa0c5b9b17121818019cd5cec7"}
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.644382 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 31 06:02:01 crc kubenswrapper[4712]: E0131 06:02:01.645672 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a76001-83c3-470c-aede-3fe832068688" containerName="cinder-db-sync"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.645794 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a76001-83c3-470c-aede-3fe832068688" containerName="cinder-db-sync"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.646119 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="29a76001-83c3-470c-aede-3fe832068688" containerName="cinder-db-sync"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.649571 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.654253 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.654272 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.658577 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kdmz5"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.658695 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.658898 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.741824 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2d2b95-18f8-446c-8a93-d575bc5ea086-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.741868 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bm48\" (UniqueName: \"kubernetes.io/projected/bf2d2b95-18f8-446c-8a93-d575bc5ea086-kube-api-access-6bm48\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.741918 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.741952 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.741981 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-scripts\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.742076 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.840294 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f7f54678f-flzvs"]
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.845301 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.849439 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.849522 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2d2b95-18f8-446c-8a93-d575bc5ea086-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.849569 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bm48\" (UniqueName: \"kubernetes.io/projected/bf2d2b95-18f8-446c-8a93-d575bc5ea086-kube-api-access-6bm48\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.849712 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.849762 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.849809 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-scripts\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.858346 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2d2b95-18f8-446c-8a93-d575bc5ea086-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.871241 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.882925 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.888540 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.889652 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-scripts\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.925012 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bm48\" (UniqueName: \"kubernetes.io/projected/bf2d2b95-18f8-446c-8a93-d575bc5ea086-kube-api-access-6bm48\") pod \"cinder-scheduler-0\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " pod="openstack/cinder-scheduler-0"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.931814 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f7f54678f-flzvs"]
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.951308 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.951395 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-config\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.951451 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsrqr\" (UniqueName: \"kubernetes.io/projected/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-kube-api-access-lsrqr\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.951483 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-swift-storage-0\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.951532 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-svc\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.951548 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-sb\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:01 crc kubenswrapper[4712]: I0131 06:02:01.986279 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.011479 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.013218 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.017644 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.049242 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.053658 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsrqr\" (UniqueName: \"kubernetes.io/projected/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-kube-api-access-lsrqr\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.053717 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-swift-storage-0\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.053772 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-svc\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.053793 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-sb\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.053854 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.053904 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-config\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.054963 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-config\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.055761 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-swift-storage-0\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.056275 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-svc\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.056766 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-sb\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.057283 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.098997 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsrqr\" (UniqueName: \"kubernetes.io/projected/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-kube-api-access-lsrqr\") pod \"dnsmasq-dns-5f7f54678f-flzvs\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " pod="openstack/dnsmasq-dns-5f7f54678f-flzvs"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.160639 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.161343 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.161418 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-logs\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.161449 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data-custom\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0"
Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.161503 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnksj\" (UniqueName: \"kubernetes.io/projected/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-kube-api-access-bnksj\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0"
\"kubernetes.io/projected/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-kube-api-access-bnksj\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.162101 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.162198 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-scripts\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.265519 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.265589 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.265619 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-logs\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.265637 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data-custom\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.265670 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnksj\" (UniqueName: \"kubernetes.io/projected/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-kube-api-access-bnksj\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.265751 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.265771 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-scripts\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.267055 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-logs\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.268308 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.271161 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-scripts\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.271678 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.272474 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data-custom\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.272683 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.285714 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnksj\" (UniqueName: \"kubernetes.io/projected/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-kube-api-access-bnksj\") pod \"cinder-api-0\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.343256 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.447234 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.668299 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.868090 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f7f54678f-flzvs"] Jan 31 06:02:02 crc kubenswrapper[4712]: W0131 06:02:02.894538 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbd78d84_f023_4a93_8bf9_d94dce69f2f7.slice/crio-911698e0ba603fc4e69f1721baf912c7c57b10de0e6b9cf6a862162e9e8dff6f WatchSource:0}: Error finding container 911698e0ba603fc4e69f1721baf912c7c57b10de0e6b9cf6a862162e9e8dff6f: Status 404 returned error can't find the container with id 911698e0ba603fc4e69f1721baf912c7c57b10de0e6b9cf6a862162e9e8dff6f Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.958636 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7566766d6b-j72k9" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:60868->10.217.0.155:9311: read: connection reset by peer" Jan 31 06:02:02 crc kubenswrapper[4712]: I0131 06:02:02.959412 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7566766d6b-j72k9" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:60874->10.217.0.155:9311: read: connection reset by peer" Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.074727 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.430784 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ca81b096-cfb6-4ce9-a252-ceeb36cf9914","Type":"ContainerStarted","Data":"94324a4d9442b9cd67fa074a81624496b968712fd194b96dfe48decd4877631f"} Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.538496 4712 generic.go:334] "Generic (PLEG): container finished" podID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerID="267293ec3f126bffbe399efe83e867bf3beeef11fcc3f369b3c6307cbede2a6a" exitCode=0 Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.538636 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7566766d6b-j72k9" event={"ID":"ec5410d1-eedd-4ba5-b403-f52f970dafc1","Type":"ContainerDied","Data":"267293ec3f126bffbe399efe83e867bf3beeef11fcc3f369b3c6307cbede2a6a"} Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.553157 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" event={"ID":"bbd78d84-f023-4a93-8bf9-d94dce69f2f7","Type":"ContainerStarted","Data":"911698e0ba603fc4e69f1721baf912c7c57b10de0e6b9cf6a862162e9e8dff6f"} Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.604204 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerStarted","Data":"34f314dc3ff96ebea017b644faeab656aac94ace85cd9c4186f832cabf0debdd"} Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.604528 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.609345 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bf2d2b95-18f8-446c-8a93-d575bc5ea086","Type":"ContainerStarted","Data":"ce61ffac730c9e3ca3b93269eb3f69c73bb7ec0211f828ea66419592bdabc25d"}
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.645495 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.320164297 podStartE2EDuration="7.645469192s" podCreationTimestamp="2026-01-31 06:01:56 +0000 UTC" firstStartedPulling="2026-01-31 06:01:57.40508352 +0000 UTC m=+1383.498965361" lastFinishedPulling="2026-01-31 06:02:02.730388415 +0000 UTC m=+1388.824270256" observedRunningTime="2026-01-31 06:02:03.640233014 +0000 UTC m=+1389.734114855" watchObservedRunningTime="2026-01-31 06:02:03.645469192 +0000 UTC m=+1389.739351033"
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.668626 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7566766d6b-j72k9"
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.809658 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4sr5\" (UniqueName: \"kubernetes.io/projected/ec5410d1-eedd-4ba5-b403-f52f970dafc1-kube-api-access-s4sr5\") pod \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") "
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.809989 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec5410d1-eedd-4ba5-b403-f52f970dafc1-logs\") pod \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") "
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.810076 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data-custom\") pod \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") "
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.810471 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data\") pod \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") "
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.810511 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-combined-ca-bundle\") pod \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\" (UID: \"ec5410d1-eedd-4ba5-b403-f52f970dafc1\") "
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.811530 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec5410d1-eedd-4ba5-b403-f52f970dafc1-logs" (OuterVolumeSpecName: "logs") pod "ec5410d1-eedd-4ba5-b403-f52f970dafc1" (UID: "ec5410d1-eedd-4ba5-b403-f52f970dafc1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.822163 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ec5410d1-eedd-4ba5-b403-f52f970dafc1" (UID: "ec5410d1-eedd-4ba5-b403-f52f970dafc1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.830398 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec5410d1-eedd-4ba5-b403-f52f970dafc1-kube-api-access-s4sr5" (OuterVolumeSpecName: "kube-api-access-s4sr5") pod "ec5410d1-eedd-4ba5-b403-f52f970dafc1" (UID: "ec5410d1-eedd-4ba5-b403-f52f970dafc1"). InnerVolumeSpecName "kube-api-access-s4sr5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.891280 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec5410d1-eedd-4ba5-b403-f52f970dafc1" (UID: "ec5410d1-eedd-4ba5-b403-f52f970dafc1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.913716 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.913760 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4sr5\" (UniqueName: \"kubernetes.io/projected/ec5410d1-eedd-4ba5-b403-f52f970dafc1-kube-api-access-s4sr5\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.913776 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec5410d1-eedd-4ba5-b403-f52f970dafc1-logs\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.913787 4712 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:03 crc kubenswrapper[4712]: I0131 06:02:03.914034 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data" (OuterVolumeSpecName: "config-data") pod "ec5410d1-eedd-4ba5-b403-f52f970dafc1" (UID: "ec5410d1-eedd-4ba5-b403-f52f970dafc1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.015780 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5410d1-eedd-4ba5-b403-f52f970dafc1-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.237044 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.678626 4712 generic.go:334] "Generic (PLEG): container finished" podID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" containerID="01d085cea9ad88faeffb1fdf8298f7d1944e6f98b982af0c301e9968c281627f" exitCode=0 Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.683053 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" event={"ID":"bbd78d84-f023-4a93-8bf9-d94dce69f2f7","Type":"ContainerDied","Data":"01d085cea9ad88faeffb1fdf8298f7d1944e6f98b982af0c301e9968c281627f"} Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.685803 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ca81b096-cfb6-4ce9-a252-ceeb36cf9914","Type":"ContainerStarted","Data":"5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f"} Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.702764 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7566766d6b-j72k9" Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.703225 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7566766d6b-j72k9" event={"ID":"ec5410d1-eedd-4ba5-b403-f52f970dafc1","Type":"ContainerDied","Data":"e6fddc7a82607a005203c79c9acd4339774a5f7847b7c836ed847a9116a92127"} Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.703258 4712 scope.go:117] "RemoveContainer" containerID="267293ec3f126bffbe399efe83e867bf3beeef11fcc3f369b3c6307cbede2a6a" Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.770242 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7566766d6b-j72k9"] Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.785706 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7566766d6b-j72k9"] Jan 31 06:02:04 crc kubenswrapper[4712]: I0131 06:02:04.788986 4712 scope.go:117] "RemoveContainer" containerID="ef756dd7ca78401fe069870595145aa9884a84b3d85f93c48f6c46c9d35dc35d" Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.713874 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bf2d2b95-18f8-446c-8a93-d575bc5ea086","Type":"ContainerStarted","Data":"6bb6a6b2399a54b1516f8dedc320841bb0cf9be90ecf926ce2e676cfc54b2abf"} Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.714430 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bf2d2b95-18f8-446c-8a93-d575bc5ea086","Type":"ContainerStarted","Data":"bfea9bc32270c5aaa431deb5aa0a94a658cafc354787d12ceeb6a72007304619"} Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.718737 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" event={"ID":"bbd78d84-f023-4a93-8bf9-d94dce69f2f7","Type":"ContainerStarted","Data":"76be602ba041475184cf206243196e743eaa9b5ea468e89ed3dafcfa2b249987"} Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.718860 4712 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.720702 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ca81b096-cfb6-4ce9-a252-ceeb36cf9914","Type":"ContainerStarted","Data":"c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e"} Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.720895 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.720888 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerName="cinder-api-log" containerID="cri-o://5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f" gracePeriod=30 Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.720916 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerName="cinder-api" containerID="cri-o://c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e" gracePeriod=30 Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.738219 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.129597476 podStartE2EDuration="4.738200922s" podCreationTimestamp="2026-01-31 06:02:01 +0000 UTC" firstStartedPulling="2026-01-31 06:02:02.693807059 +0000 UTC m=+1388.787688900" lastFinishedPulling="2026-01-31 06:02:03.302410505 +0000 UTC m=+1389.396292346" observedRunningTime="2026-01-31 06:02:05.734685626 +0000 UTC m=+1391.828567467" watchObservedRunningTime="2026-01-31 06:02:05.738200922 +0000 UTC m=+1391.832082763" Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.760284 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" podStartSLOduration=4.760262343 podStartE2EDuration="4.760262343s" podCreationTimestamp="2026-01-31 06:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:05.753551988 +0000 UTC m=+1391.847433829" watchObservedRunningTime="2026-01-31 06:02:05.760262343 +0000 UTC m=+1391.854144184" Jan 31 06:02:05 crc kubenswrapper[4712]: I0131 06:02:05.793099 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.793080167 podStartE2EDuration="4.793080167s" podCreationTimestamp="2026-01-31 06:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:05.774977924 +0000 UTC m=+1391.868859755" watchObservedRunningTime="2026-01-31 06:02:05.793080167 +0000 UTC m=+1391.886962008" Jan 31 06:02:06 crc kubenswrapper[4712]: I0131 06:02:06.515638 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" path="/var/lib/kubelet/pods/ec5410d1-eedd-4ba5-b403-f52f970dafc1/volumes" Jan 31 06:02:06 crc kubenswrapper[4712]: I0131 06:02:06.739112 4712 generic.go:334] "Generic (PLEG): container finished" podID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerID="5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f" exitCode=143 Jan 31 06:02:06 crc kubenswrapper[4712]: I0131 06:02:06.739192 4712 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ca81b096-cfb6-4ce9-a252-ceeb36cf9914","Type":"ContainerDied","Data":"5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f"} Jan 31 06:02:06 crc kubenswrapper[4712]: I0131 06:02:06.987580 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 31 06:02:07 crc kubenswrapper[4712]: I0131 06:02:07.813025 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-76bf8584d5-c59jx" Jan 31 06:02:09 crc kubenswrapper[4712]: I0131 06:02:09.401713 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-778dc5b584-k7lvt" Jan 31 06:02:09 crc kubenswrapper[4712]: I0131 06:02:09.415453 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-778dc5b584-k7lvt" Jan 31 06:02:09 crc kubenswrapper[4712]: I0131 06:02:09.980961 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l6lfz" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.041080 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l6lfz" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.161903 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 31 06:02:10 crc kubenswrapper[4712]: E0131 06:02:10.162877 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.162907 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api" Jan 31 06:02:10 crc kubenswrapper[4712]: E0131 06:02:10.162960 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api-log" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.162968 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api-log" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.163398 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.163424 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec5410d1-eedd-4ba5-b403-f52f970dafc1" containerName="barbican-api-log" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.164326 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.168434 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-qmv5t" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.168720 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.168846 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.208160 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.251769 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.251830 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.251943 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config-secret\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.251970 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqkh9\" (UniqueName: \"kubernetes.io/projected/c9dfb688-09d7-483c-ad27-f51712a87c43-kube-api-access-qqkh9\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.354132 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config-secret\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.354288 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqkh9\" (UniqueName: \"kubernetes.io/projected/c9dfb688-09d7-483c-ad27-f51712a87c43-kube-api-access-qqkh9\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.354498 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.354556 4712 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.355866 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.372580 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config-secret\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.372724 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.376321 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqkh9\" (UniqueName: \"kubernetes.io/projected/c9dfb688-09d7-483c-ad27-f51712a87c43-kube-api-access-qqkh9\") pod \"openstackclient\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.500761 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.603250 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.615165 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.627661 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6lfz"] Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.636677 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.638036 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.653164 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.764404 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-openstack-config-secret\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.764485 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-openstack-config\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.764556 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s5zp\" (UniqueName: \"kubernetes.io/projected/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-kube-api-access-4s5zp\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.764581 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.805104 4712 generic.go:334] "Generic (PLEG): container finished" podID="f515caad-5449-4314-ba23-cc132eba7102" containerID="fab21e60b8618323bccecabf09432c2e453bbf57faf938e7fd6c0fec18f5d266" exitCode=0 Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.805432 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bp49d" event={"ID":"f515caad-5449-4314-ba23-cc132eba7102","Type":"ContainerDied","Data":"fab21e60b8618323bccecabf09432c2e453bbf57faf938e7fd6c0fec18f5d266"} Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.871483 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s5zp\" (UniqueName: \"kubernetes.io/projected/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-kube-api-access-4s5zp\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.871552 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.871725 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-openstack-config-secret\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.871786 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-openstack-config\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.872787 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-openstack-config\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: E0131 06:02:10.873342 4712 log.go:32] "RunPodSandbox from runtime service failed" err=< Jan 31 06:02:10 crc kubenswrapper[4712]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_c9dfb688-09d7-483c-ad27-f51712a87c43_0(b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515" Netns:"/var/run/netns/2b6b0353-eef9-4b5f-834e-7edd052b2b9e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515;K8S_POD_UID=c9dfb688-09d7-483c-ad27-f51712a87c43" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: [openstack/openstackclient/c9dfb688-09d7-483c-ad27-f51712a87c43:ovn-kubernetes]: error adding container to network "ovn-kubernetes": CNI request failed with status 400: '[openstack/openstackclient b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515 network default NAD default] [openstack/openstackclient b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515 network default NAD default] pod deleted before sandbox ADD operation began Jan 31 06:02:10 crc kubenswrapper[4712]: ' Jan 31 06:02:10 crc kubenswrapper[4712]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 31 06:02:10 crc kubenswrapper[4712]: > Jan 31 06:02:10 crc kubenswrapper[4712]: E0131 06:02:10.873416 4712 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Jan 31 06:02:10 crc kubenswrapper[4712]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_c9dfb688-09d7-483c-ad27-f51712a87c43_0(b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515" Netns:"/var/run/netns/2b6b0353-eef9-4b5f-834e-7edd052b2b9e" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515;K8S_POD_UID=c9dfb688-09d7-483c-ad27-f51712a87c43" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: [openstack/openstackclient/c9dfb688-09d7-483c-ad27-f51712a87c43:ovn-kubernetes]: error adding container to network "ovn-kubernetes": CNI request failed with status 400: '[openstack/openstackclient b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515 network default NAD default] [openstack/openstackclient b66ec57774881676618d477a65edbb719bdbc8628027e942be7f8a2502908515 network default NAD default] pod deleted before sandbox ADD operation began Jan 31 06:02:10 crc kubenswrapper[4712]: ' Jan 31 06:02:10 crc kubenswrapper[4712]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 31 06:02:10 crc kubenswrapper[4712]: > pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.876810 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-openstack-config-secret\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.876972 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.890377 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s5zp\" (UniqueName: \"kubernetes.io/projected/5403021d-e6d4-4e4e-aa8e-8879f65f9f36-kube-api-access-4s5zp\") pod \"openstackclient\" (UID: \"5403021d-e6d4-4e4e-aa8e-8879f65f9f36\") " pod="openstack/openstackclient" Jan 31 06:02:10 crc kubenswrapper[4712]: I0131 06:02:10.970664 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 31 06:02:11 crc kubenswrapper[4712]: W0131 06:02:11.473908 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5403021d_e6d4_4e4e_aa8e_8879f65f9f36.slice/crio-600df99c7716afda050b392cb931e0f9ef093664ece7a228cc58b6b01ddea397 WatchSource:0}: Error finding container 600df99c7716afda050b392cb931e0f9ef093664ece7a228cc58b6b01ddea397: Status 404 returned error can't find the container with id 600df99c7716afda050b392cb931e0f9ef093664ece7a228cc58b6b01ddea397 Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.474315 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.821131 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"5403021d-e6d4-4e4e-aa8e-8879f65f9f36","Type":"ContainerStarted","Data":"600df99c7716afda050b392cb931e0f9ef093664ece7a228cc58b6b01ddea397"} Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.821318 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.821489 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l6lfz" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerName="registry-server" containerID="cri-o://85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3" gracePeriod=2 Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.828996 4712 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="c9dfb688-09d7-483c-ad27-f51712a87c43" podUID="5403021d-e6d4-4e4e-aa8e-8879f65f9f36" Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.840933 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.912302 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config-secret\") pod \"c9dfb688-09d7-483c-ad27-f51712a87c43\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.912437 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config\") pod \"c9dfb688-09d7-483c-ad27-f51712a87c43\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.912472 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqkh9\" (UniqueName: \"kubernetes.io/projected/c9dfb688-09d7-483c-ad27-f51712a87c43-kube-api-access-qqkh9\") pod \"c9dfb688-09d7-483c-ad27-f51712a87c43\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.912534 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-combined-ca-bundle\") pod \"c9dfb688-09d7-483c-ad27-f51712a87c43\" (UID: \"c9dfb688-09d7-483c-ad27-f51712a87c43\") " Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.913757 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "c9dfb688-09d7-483c-ad27-f51712a87c43" (UID: "c9dfb688-09d7-483c-ad27-f51712a87c43"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.917919 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9dfb688-09d7-483c-ad27-f51712a87c43" (UID: "c9dfb688-09d7-483c-ad27-f51712a87c43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.918898 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9dfb688-09d7-483c-ad27-f51712a87c43-kube-api-access-qqkh9" (OuterVolumeSpecName: "kube-api-access-qqkh9") pod "c9dfb688-09d7-483c-ad27-f51712a87c43" (UID: "c9dfb688-09d7-483c-ad27-f51712a87c43"). InnerVolumeSpecName "kube-api-access-qqkh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:11 crc kubenswrapper[4712]: I0131 06:02:11.919008 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "c9dfb688-09d7-483c-ad27-f51712a87c43" (UID: "c9dfb688-09d7-483c-ad27-f51712a87c43"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.014961 4712 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.015003 4712 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c9dfb688-09d7-483c-ad27-f51712a87c43-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.015014 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqkh9\" (UniqueName: \"kubernetes.io/projected/c9dfb688-09d7-483c-ad27-f51712a87c43-kube-api-access-qqkh9\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.015023 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9dfb688-09d7-483c-ad27-f51712a87c43-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.229951 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bp49d" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.320444 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-config\") pod \"f515caad-5449-4314-ba23-cc132eba7102\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.320543 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgzbz\" (UniqueName: \"kubernetes.io/projected/f515caad-5449-4314-ba23-cc132eba7102-kube-api-access-rgzbz\") pod \"f515caad-5449-4314-ba23-cc132eba7102\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.320611 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-combined-ca-bundle\") pod \"f515caad-5449-4314-ba23-cc132eba7102\" (UID: \"f515caad-5449-4314-ba23-cc132eba7102\") " Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.334420 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f515caad-5449-4314-ba23-cc132eba7102-kube-api-access-rgzbz" (OuterVolumeSpecName: "kube-api-access-rgzbz") pod "f515caad-5449-4314-ba23-cc132eba7102" (UID: "f515caad-5449-4314-ba23-cc132eba7102"). InnerVolumeSpecName "kube-api-access-rgzbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.347346 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.349539 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l6lfz" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.358335 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f515caad-5449-4314-ba23-cc132eba7102" (UID: "f515caad-5449-4314-ba23-cc132eba7102"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.358362 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-config" (OuterVolumeSpecName: "config") pod "f515caad-5449-4314-ba23-cc132eba7102" (UID: "f515caad-5449-4314-ba23-cc132eba7102"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.358501 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.421980 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dswpg\" (UniqueName: \"kubernetes.io/projected/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-kube-api-access-dswpg\") pod \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.422066 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-utilities\") pod \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.422160 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-catalog-content\") pod \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\" (UID: \"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf\") " Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.423291 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.423314 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgzbz\" (UniqueName: \"kubernetes.io/projected/f515caad-5449-4314-ba23-cc132eba7102-kube-api-access-rgzbz\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.423326 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f515caad-5449-4314-ba23-cc132eba7102-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.424710 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-utilities" (OuterVolumeSpecName: "utilities") pod "1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" (UID: "1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.459105 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-kube-api-access-dswpg" (OuterVolumeSpecName: "kube-api-access-dswpg") pod "1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" (UID: "1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf"). InnerVolumeSpecName "kube-api-access-dswpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.502141 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54d644b8ff-2w9fj"] Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.502474 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" podUID="ccfdebd9-0d97-4015-bc8d-c88944835f62" containerName="dnsmasq-dns" containerID="cri-o://ea4754ea4860879bcd24269802a1feed811f143a90297096962b670877941963" gracePeriod=10 Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.525432 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dswpg\" (UniqueName: \"kubernetes.io/projected/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-kube-api-access-dswpg\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.525469 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.544157 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9dfb688-09d7-483c-ad27-f51712a87c43" path="/var/lib/kubelet/pods/c9dfb688-09d7-483c-ad27-f51712a87c43/volumes" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.550438 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.649482 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" (UID: "1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.733264 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.852076 4712 generic.go:334] "Generic (PLEG): container finished" podID="ccfdebd9-0d97-4015-bc8d-c88944835f62" containerID="ea4754ea4860879bcd24269802a1feed811f143a90297096962b670877941963" exitCode=0 Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.852142 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" event={"ID":"ccfdebd9-0d97-4015-bc8d-c88944835f62","Type":"ContainerDied","Data":"ea4754ea4860879bcd24269802a1feed811f143a90297096962b670877941963"} Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.856474 4712 generic.go:334] "Generic (PLEG): container finished" podID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerID="85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3" exitCode=0 Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.856514 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6lfz" event={"ID":"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf","Type":"ContainerDied","Data":"85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3"} Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.856534 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6lfz" event={"ID":"1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf","Type":"ContainerDied","Data":"5ed86ea733a19c6793a045b195e8717b6a5c812f710a1cdc483aae35db4e779b"} Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.856551 4712 scope.go:117] "RemoveContainer" containerID="85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.856689 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6lfz" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.875708 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerName="cinder-scheduler" containerID="cri-o://6bb6a6b2399a54b1516f8dedc320841bb0cf9be90ecf926ce2e676cfc54b2abf" gracePeriod=30 Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.875767 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.876293 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerName="probe" containerID="cri-o://bfea9bc32270c5aaa431deb5aa0a94a658cafc354787d12ceeb6a72007304619" gracePeriod=30 Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.876380 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bp49d" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.876375 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bp49d" event={"ID":"f515caad-5449-4314-ba23-cc132eba7102","Type":"ContainerDied","Data":"9dcc1f864689e5845aede4dd42b9b530a4bafbaeb67e239b57690353765794f4"} Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.876421 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dcc1f864689e5845aede4dd42b9b530a4bafbaeb67e239b57690353765794f4" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.899542 4712 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="c9dfb688-09d7-483c-ad27-f51712a87c43" podUID="5403021d-e6d4-4e4e-aa8e-8879f65f9f36" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.901049 4712 scope.go:117] "RemoveContainer" containerID="84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.936730 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6lfz"] Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.957806 4712 scope.go:117] "RemoveContainer" containerID="1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8" Jan 31 06:02:12 crc kubenswrapper[4712]: I0131 06:02:12.964023 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l6lfz"] Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.041403 4712 scope.go:117] "RemoveContainer" containerID="85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3" Jan 31 06:02:13 crc kubenswrapper[4712]: E0131 06:02:13.043887 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3\": container with ID starting with 85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3 not found: ID does not exist" containerID="85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.043919 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3"} err="failed to get container status \"85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3\": rpc error: code = NotFound desc = could not find container \"85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3\": container with ID starting with 85d2d8dd532c975edbfb07a66fdb2776e7a7369ba6bdfdb154fb131e018633a3 not found: ID does not exist" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.043942 4712 scope.go:117] "RemoveContainer" containerID="84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc" Jan 31 06:02:13 crc kubenswrapper[4712]: E0131 06:02:13.052326 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc\": container with ID starting with 84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc not found: ID does not exist" containerID="84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.052361 4712 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc"} err="failed to get container status \"84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc\": rpc error: code = NotFound desc = could not find container \"84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc\": container with ID starting with 84780b3cd237cab75a45671295b7a58b694a1001e91eabbee006ce231d23e5cc not found: ID does not exist" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.052383 4712 scope.go:117] "RemoveContainer" containerID="1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8" Jan 31 06:02:13 crc kubenswrapper[4712]: E0131 06:02:13.058386 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8\": container with ID starting with 1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8 not found: ID does not exist" containerID="1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.058424 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8"} err="failed to get container status \"1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8\": rpc error: code = NotFound desc = could not find container \"1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8\": container with ID starting with 1e5a0f498b8d21265c97c994849eb88ba35a1e459b674570f9916990776415c8 not found: ID does not exist" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.148314 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66bccdcd7c-pgqbm"] Jan 31 06:02:13 crc kubenswrapper[4712]: E0131 06:02:13.148733 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerName="extract-utilities" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.148746 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerName="extract-utilities" Jan 31 06:02:13 crc kubenswrapper[4712]: E0131 06:02:13.148759 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerName="extract-content" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.148765 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerName="extract-content" Jan 31 06:02:13 crc kubenswrapper[4712]: E0131 06:02:13.148775 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerName="registry-server" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.148781 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerName="registry-server" Jan 31 06:02:13 crc kubenswrapper[4712]: E0131 06:02:13.148810 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f515caad-5449-4314-ba23-cc132eba7102" containerName="neutron-db-sync" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.148817 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f515caad-5449-4314-ba23-cc132eba7102" containerName="neutron-db-sync" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.148983 4712 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f515caad-5449-4314-ba23-cc132eba7102" containerName="neutron-db-sync" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.149001 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" containerName="registry-server" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.150002 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.186841 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66bccdcd7c-pgqbm"] Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.256387 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-nb\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.256471 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-svc\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.256514 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-config\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.256546 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-sb\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.256584 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-swift-storage-0\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.256621 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhltd\" (UniqueName: \"kubernetes.io/projected/f5da79de-08a9-473b-ad68-846cb4403a85-kube-api-access-dhltd\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.358905 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhltd\" (UniqueName: \"kubernetes.io/projected/f5da79de-08a9-473b-ad68-846cb4403a85-kube-api-access-dhltd\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc 
kubenswrapper[4712]: I0131 06:02:13.358999 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-nb\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.359039 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-svc\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.359078 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-config\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.359114 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-sb\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.359146 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-swift-storage-0\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.360193 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-77dc4b7484-plhfq"] Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.360270 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-swift-storage-0\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.361015 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-nb\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.366459 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.368037 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-config\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.368140 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-sb\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.369633 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.373725 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.374102 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-mspqc" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.374453 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.379810 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-svc\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.390480 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhltd\" (UniqueName: \"kubernetes.io/projected/f5da79de-08a9-473b-ad68-846cb4403a85-kube-api-access-dhltd\") pod \"dnsmasq-dns-66bccdcd7c-pgqbm\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") " pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.396652 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-77dc4b7484-plhfq"] Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.462747 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncpcf\" (UniqueName: \"kubernetes.io/projected/ce5eb4c6-42a2-463c-92d3-baca84929eed-kube-api-access-ncpcf\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.463307 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-config\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.463395 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-httpd-config\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " 
pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.463433 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-ovndb-tls-certs\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.463471 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-combined-ca-bundle\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.535729 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.566133 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncpcf\" (UniqueName: \"kubernetes.io/projected/ce5eb4c6-42a2-463c-92d3-baca84929eed-kube-api-access-ncpcf\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.566196 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-config\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.566269 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-httpd-config\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.566299 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-ovndb-tls-certs\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.566336 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-combined-ca-bundle\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.574045 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-combined-ca-bundle\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.586251 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-ovndb-tls-certs\") 
pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.590249 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncpcf\" (UniqueName: \"kubernetes.io/projected/ce5eb4c6-42a2-463c-92d3-baca84929eed-kube-api-access-ncpcf\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.599783 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-config\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.636016 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-httpd-config\") pod \"neutron-77dc4b7484-plhfq\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.741564 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.863631 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.922128 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" event={"ID":"ccfdebd9-0d97-4015-bc8d-c88944835f62","Type":"ContainerDied","Data":"c5fad597cbc4503f1ef7495104ce2d5f1e1853b5aefa5fb70fc7ad085d6320b2"} Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.922200 4712 scope.go:117] "RemoveContainer" containerID="ea4754ea4860879bcd24269802a1feed811f143a90297096962b670877941963" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.922343 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54d644b8ff-2w9fj" Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.976797 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fw47\" (UniqueName: \"kubernetes.io/projected/ccfdebd9-0d97-4015-bc8d-c88944835f62-kube-api-access-5fw47\") pod \"ccfdebd9-0d97-4015-bc8d-c88944835f62\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.976927 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-config\") pod \"ccfdebd9-0d97-4015-bc8d-c88944835f62\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.977041 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-nb\") pod \"ccfdebd9-0d97-4015-bc8d-c88944835f62\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.977100 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-swift-storage-0\") pod \"ccfdebd9-0d97-4015-bc8d-c88944835f62\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.977151 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-sb\") pod \"ccfdebd9-0d97-4015-bc8d-c88944835f62\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " Jan 31 06:02:13 crc kubenswrapper[4712]: I0131 06:02:13.977245 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-svc\") pod \"ccfdebd9-0d97-4015-bc8d-c88944835f62\" (UID: \"ccfdebd9-0d97-4015-bc8d-c88944835f62\") " Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.005902 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccfdebd9-0d97-4015-bc8d-c88944835f62-kube-api-access-5fw47" (OuterVolumeSpecName: "kube-api-access-5fw47") pod "ccfdebd9-0d97-4015-bc8d-c88944835f62" (UID: "ccfdebd9-0d97-4015-bc8d-c88944835f62"). InnerVolumeSpecName "kube-api-access-5fw47". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.057754 4712 scope.go:117] "RemoveContainer" containerID="61f0d06506088cf8d5df8c1775e74daa2b559846e5b922090eb4779a3aeb38c2" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.074193 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ccfdebd9-0d97-4015-bc8d-c88944835f62" (UID: "ccfdebd9-0d97-4015-bc8d-c88944835f62"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.079027 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.079061 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fw47\" (UniqueName: \"kubernetes.io/projected/ccfdebd9-0d97-4015-bc8d-c88944835f62-kube-api-access-5fw47\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.085003 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-config" (OuterVolumeSpecName: "config") pod "ccfdebd9-0d97-4015-bc8d-c88944835f62" (UID: "ccfdebd9-0d97-4015-bc8d-c88944835f62"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.101321 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ccfdebd9-0d97-4015-bc8d-c88944835f62" (UID: "ccfdebd9-0d97-4015-bc8d-c88944835f62"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.114867 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ccfdebd9-0d97-4015-bc8d-c88944835f62" (UID: "ccfdebd9-0d97-4015-bc8d-c88944835f62"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.119279 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ccfdebd9-0d97-4015-bc8d-c88944835f62" (UID: "ccfdebd9-0d97-4015-bc8d-c88944835f62"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.184377 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.184603 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.184697 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.184754 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ccfdebd9-0d97-4015-bc8d-c88944835f62-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.265221 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54d644b8ff-2w9fj"] Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.276876 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54d644b8ff-2w9fj"] Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.349591 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66bccdcd7c-pgqbm"] Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.554843 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf" path="/var/lib/kubelet/pods/1da0bdfa-96fc-4a66-ac98-c1cd1945b9cf/volumes" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.556507 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccfdebd9-0d97-4015-bc8d-c88944835f62" path="/var/lib/kubelet/pods/ccfdebd9-0d97-4015-bc8d-c88944835f62/volumes" Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.659574 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-77dc4b7484-plhfq"] Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.947509 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-77dc4b7484-plhfq" event={"ID":"ce5eb4c6-42a2-463c-92d3-baca84929eed","Type":"ContainerStarted","Data":"2190e6c0f464b23ca4b2f9284b60ca56ca911d5012963cf0e6b83c5e19ed5d11"} Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.950492 4712 generic.go:334] "Generic (PLEG): container finished" podID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerID="bfea9bc32270c5aaa431deb5aa0a94a658cafc354787d12ceeb6a72007304619" exitCode=0 Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.950584 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bf2d2b95-18f8-446c-8a93-d575bc5ea086","Type":"ContainerDied","Data":"bfea9bc32270c5aaa431deb5aa0a94a658cafc354787d12ceeb6a72007304619"} Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.953729 4712 generic.go:334] "Generic (PLEG): container finished" podID="f5da79de-08a9-473b-ad68-846cb4403a85" containerID="b58abf8caf9af39c62cfa3f55e85b3569dce310107d1609ad07a24e5650ca846" exitCode=0 Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.953776 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" 
event={"ID":"f5da79de-08a9-473b-ad68-846cb4403a85","Type":"ContainerDied","Data":"b58abf8caf9af39c62cfa3f55e85b3569dce310107d1609ad07a24e5650ca846"} Jan 31 06:02:14 crc kubenswrapper[4712]: I0131 06:02:14.953814 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" event={"ID":"f5da79de-08a9-473b-ad68-846cb4403a85","Type":"ContainerStarted","Data":"1dc48d6d49a65f3f180684236133f168a550f5481d014999a0d356f95aba04fe"} Jan 31 06:02:15 crc kubenswrapper[4712]: I0131 06:02:15.534442 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 31 06:02:15 crc kubenswrapper[4712]: I0131 06:02:15.965563 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-77dc4b7484-plhfq" event={"ID":"ce5eb4c6-42a2-463c-92d3-baca84929eed","Type":"ContainerStarted","Data":"4a1e8a9ff09ef41761f615f9444ad1d74ec714281513e738e056eb11c837f8f7"} Jan 31 06:02:15 crc kubenswrapper[4712]: I0131 06:02:15.965918 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-77dc4b7484-plhfq" event={"ID":"ce5eb4c6-42a2-463c-92d3-baca84929eed","Type":"ContainerStarted","Data":"9c3af8b24418c9521385fbe3496ea55dbfc0135a3325bb5b0e3a8027d8e6606f"} Jan 31 06:02:15 crc kubenswrapper[4712]: I0131 06:02:15.965942 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:15 crc kubenswrapper[4712]: I0131 06:02:15.974140 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" event={"ID":"f5da79de-08a9-473b-ad68-846cb4403a85","Type":"ContainerStarted","Data":"e37f59ccedffffa7f93fd2a9947cc705171929dc3e78d65ce03450efcd1d911d"} Jan 31 06:02:15 crc kubenswrapper[4712]: I0131 06:02:15.974483 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.003043 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-77dc4b7484-plhfq" podStartSLOduration=3.003023848 podStartE2EDuration="3.003023848s" podCreationTimestamp="2026-01-31 06:02:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:15.993807822 +0000 UTC m=+1402.087689673" watchObservedRunningTime="2026-01-31 06:02:16.003023848 +0000 UTC m=+1402.096905689" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.020279 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" podStartSLOduration=3.02025656 podStartE2EDuration="3.02025656s" podCreationTimestamp="2026-01-31 06:02:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:16.014558831 +0000 UTC m=+1402.108440672" watchObservedRunningTime="2026-01-31 06:02:16.02025656 +0000 UTC m=+1402.114138411" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.177552 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6445df85d9-7dknt"] Jan 31 06:02:16 crc kubenswrapper[4712]: E0131 06:02:16.177977 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccfdebd9-0d97-4015-bc8d-c88944835f62" containerName="init" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.177993 4712 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="ccfdebd9-0d97-4015-bc8d-c88944835f62" containerName="init" Jan 31 06:02:16 crc kubenswrapper[4712]: E0131 06:02:16.178045 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccfdebd9-0d97-4015-bc8d-c88944835f62" containerName="dnsmasq-dns" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.178054 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccfdebd9-0d97-4015-bc8d-c88944835f62" containerName="dnsmasq-dns" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.178282 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccfdebd9-0d97-4015-bc8d-c88944835f62" containerName="dnsmasq-dns" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.179456 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.188690 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.188938 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.223124 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6445df85d9-7dknt"] Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.365370 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsn74\" (UniqueName: \"kubernetes.io/projected/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-kube-api-access-rsn74\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.365444 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-httpd-config\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.365514 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-public-tls-certs\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.365535 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-internal-tls-certs\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.365596 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-config\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.365613 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-combined-ca-bundle\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.365635 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-ovndb-tls-certs\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.467308 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-config\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.467353 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-combined-ca-bundle\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.468208 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-ovndb-tls-certs\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.468269 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsn74\" (UniqueName: \"kubernetes.io/projected/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-kube-api-access-rsn74\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.468334 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-httpd-config\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.468421 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-public-tls-certs\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.468454 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-internal-tls-certs\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.483286 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-httpd-config\") pod \"neutron-6445df85d9-7dknt\" (UID: 
\"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.484261 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-public-tls-certs\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.485866 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-internal-tls-certs\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.490908 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-config\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.498791 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-ovndb-tls-certs\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.506013 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-combined-ca-bundle\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.510291 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsn74\" (UniqueName: \"kubernetes.io/projected/2b8f18ba-9096-4d08-9d1f-4efed6b7883a-kube-api-access-rsn74\") pod \"neutron-6445df85d9-7dknt\" (UID: \"2b8f18ba-9096-4d08-9d1f-4efed6b7883a\") " pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.524677 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.993978 4712 generic.go:334] "Generic (PLEG): container finished" podID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerID="6bb6a6b2399a54b1516f8dedc320841bb0cf9be90ecf926ce2e676cfc54b2abf" exitCode=0 Jan 31 06:02:16 crc kubenswrapper[4712]: I0131 06:02:16.994332 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bf2d2b95-18f8-446c-8a93-d575bc5ea086","Type":"ContainerDied","Data":"6bb6a6b2399a54b1516f8dedc320841bb0cf9be90ecf926ce2e676cfc54b2abf"} Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.218406 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6445df85d9-7dknt"] Jan 31 06:02:17 crc kubenswrapper[4712]: W0131 06:02:17.231991 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b8f18ba_9096_4d08_9d1f_4efed6b7883a.slice/crio-f9ff94da10fa73ebee312edae24fb676faf65333b76e38cf4e05e320e7b79d7b WatchSource:0}: Error finding container f9ff94da10fa73ebee312edae24fb676faf65333b76e38cf4e05e320e7b79d7b: Status 404 returned error can't find the container with id f9ff94da10fa73ebee312edae24fb676faf65333b76e38cf4e05e320e7b79d7b Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.373496 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.514840 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data-custom\") pod \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.515003 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-combined-ca-bundle\") pod \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.515035 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-scripts\") pod \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.515068 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bm48\" (UniqueName: \"kubernetes.io/projected/bf2d2b95-18f8-446c-8a93-d575bc5ea086-kube-api-access-6bm48\") pod \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.515089 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2d2b95-18f8-446c-8a93-d575bc5ea086-etc-machine-id\") pod \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.515124 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data\") pod 
\"bf2d2b95-18f8-446c-8a93-d575bc5ea086\" (UID: \"bf2d2b95-18f8-446c-8a93-d575bc5ea086\") " Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.518494 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf2d2b95-18f8-446c-8a93-d575bc5ea086-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bf2d2b95-18f8-446c-8a93-d575bc5ea086" (UID: "bf2d2b95-18f8-446c-8a93-d575bc5ea086"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.522846 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf2d2b95-18f8-446c-8a93-d575bc5ea086-kube-api-access-6bm48" (OuterVolumeSpecName: "kube-api-access-6bm48") pod "bf2d2b95-18f8-446c-8a93-d575bc5ea086" (UID: "bf2d2b95-18f8-446c-8a93-d575bc5ea086"). InnerVolumeSpecName "kube-api-access-6bm48". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.529402 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bf2d2b95-18f8-446c-8a93-d575bc5ea086" (UID: "bf2d2b95-18f8-446c-8a93-d575bc5ea086"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.529557 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-scripts" (OuterVolumeSpecName: "scripts") pod "bf2d2b95-18f8-446c-8a93-d575bc5ea086" (UID: "bf2d2b95-18f8-446c-8a93-d575bc5ea086"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.620225 4712 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.621757 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.621803 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bm48\" (UniqueName: \"kubernetes.io/projected/bf2d2b95-18f8-446c-8a93-d575bc5ea086-kube-api-access-6bm48\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.621820 4712 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2d2b95-18f8-446c-8a93-d575bc5ea086-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.632533 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf2d2b95-18f8-446c-8a93-d575bc5ea086" (UID: "bf2d2b95-18f8-446c-8a93-d575bc5ea086"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.672451 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data" (OuterVolumeSpecName: "config-data") pod "bf2d2b95-18f8-446c-8a93-d575bc5ea086" (UID: "bf2d2b95-18f8-446c-8a93-d575bc5ea086"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.723493 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:17 crc kubenswrapper[4712]: I0131 06:02:17.723544 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2d2b95-18f8-446c-8a93-d575bc5ea086-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.037228 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6445df85d9-7dknt" event={"ID":"2b8f18ba-9096-4d08-9d1f-4efed6b7883a","Type":"ContainerStarted","Data":"7eedb630623a2befc00c0415460d2e12f17cdaac9d61648dd3edc948e9a0006a"} Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.037898 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.037915 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6445df85d9-7dknt" event={"ID":"2b8f18ba-9096-4d08-9d1f-4efed6b7883a","Type":"ContainerStarted","Data":"8da2a2d0b387d1474399c504a2ca3b0edf348040ed36105a1732ee5db758b63e"} Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.037925 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6445df85d9-7dknt" event={"ID":"2b8f18ba-9096-4d08-9d1f-4efed6b7883a","Type":"ContainerStarted","Data":"f9ff94da10fa73ebee312edae24fb676faf65333b76e38cf4e05e320e7b79d7b"} Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.043771 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bf2d2b95-18f8-446c-8a93-d575bc5ea086","Type":"ContainerDied","Data":"ce61ffac730c9e3ca3b93269eb3f69c73bb7ec0211f828ea66419592bdabc25d"} Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.043819 4712 scope.go:117] "RemoveContainer" containerID="bfea9bc32270c5aaa431deb5aa0a94a658cafc354787d12ceeb6a72007304619" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.043955 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.066016 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6445df85d9-7dknt" podStartSLOduration=2.065993738 podStartE2EDuration="2.065993738s" podCreationTimestamp="2026-01-31 06:02:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:18.057278955 +0000 UTC m=+1404.151160796" watchObservedRunningTime="2026-01-31 06:02:18.065993738 +0000 UTC m=+1404.159875579" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.091850 4712 scope.go:117] "RemoveContainer" containerID="6bb6a6b2399a54b1516f8dedc320841bb0cf9be90ecf926ce2e676cfc54b2abf" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.103256 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.129388 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.140056 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 31 06:02:18 crc kubenswrapper[4712]: E0131 06:02:18.140638 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerName="probe" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.140657 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerName="probe" Jan 31 06:02:18 crc kubenswrapper[4712]: E0131 06:02:18.140676 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerName="cinder-scheduler" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.140683 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerName="cinder-scheduler" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.140859 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerName="cinder-scheduler" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.140891 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" containerName="probe" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.141877 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.152013 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.152518 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.339937 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rwpp\" (UniqueName: \"kubernetes.io/projected/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-kube-api-access-6rwpp\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.339980 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.340000 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.340022 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-config-data\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.340165 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.340257 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-scripts\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.442271 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.442747 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-scripts\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.442908 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-6rwpp\" (UniqueName: \"kubernetes.io/projected/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-kube-api-access-6rwpp\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.442938 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.442965 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.442995 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-config-data\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.443844 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.449110 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.449973 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-scripts\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.450554 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.451154 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-config-data\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.466457 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rwpp\" (UniqueName: \"kubernetes.io/projected/2118bcf2-8afd-4e35-b53b-6998f5c6a5cc-kube-api-access-6rwpp\") pod \"cinder-scheduler-0\" (UID: \"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc\") " 
pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.492842 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.518790 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf2d2b95-18f8-446c-8a93-d575bc5ea086" path="/var/lib/kubelet/pods/bf2d2b95-18f8-446c-8a93-d575bc5ea086/volumes" Jan 31 06:02:18 crc kubenswrapper[4712]: I0131 06:02:18.997790 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 31 06:02:19 crc kubenswrapper[4712]: I0131 06:02:19.093607 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc","Type":"ContainerStarted","Data":"6becf6ffd06dee1f5544f801ff29d2dc6d52beaa73672af47f5a5f7b61dc911f"} Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.115752 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc","Type":"ContainerStarted","Data":"0eb8a6b6e2ef8911ff1092d0f56a83b9b9dea37871059cd6243b3f753383f6d3"} Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.728887 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-8557fb7df9-bhzkt"] Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.731355 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.733850 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.735197 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.736152 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.785228 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-8557fb7df9-bhzkt"] Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.908234 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-combined-ca-bundle\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.908387 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz2wt\" (UniqueName: \"kubernetes.io/projected/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-kube-api-access-xz2wt\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.908420 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-etc-swift\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.908478 4712 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-run-httpd\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.908515 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-log-httpd\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.908542 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-internal-tls-certs\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.908591 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-config-data\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:20 crc kubenswrapper[4712]: I0131 06:02:20.908619 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-public-tls-certs\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.010774 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz2wt\" (UniqueName: \"kubernetes.io/projected/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-kube-api-access-xz2wt\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.010827 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-etc-swift\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.010891 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-run-httpd\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.010928 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-log-httpd\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.010951 
4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-internal-tls-certs\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.010971 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-config-data\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.010987 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-public-tls-certs\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.011036 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-combined-ca-bundle\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.011931 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-run-httpd\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.012019 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-log-httpd\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.019497 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-combined-ca-bundle\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.022147 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-internal-tls-certs\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.023194 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-config-data\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.030510 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-etc-swift\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.032063 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-public-tls-certs\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.033240 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz2wt\" (UniqueName: \"kubernetes.io/projected/2a8507c3-4b91-4b81-83ba-4bb63b3745f0-kube-api-access-xz2wt\") pod \"swift-proxy-8557fb7df9-bhzkt\" (UID: \"2a8507c3-4b91-4b81-83ba-4bb63b3745f0\") " pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.101961 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:21 crc kubenswrapper[4712]: I0131 06:02:21.772685 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-8557fb7df9-bhzkt"] Jan 31 06:02:22 crc kubenswrapper[4712]: I0131 06:02:22.054436 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:02:22 crc kubenswrapper[4712]: I0131 06:02:22.054987 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerName="glance-log" containerID="cri-o://53f0ebddd685206db7ac8cf26daaf377aff3e3de846de3284b0198d67695cdeb" gracePeriod=30 Jan 31 06:02:22 crc kubenswrapper[4712]: I0131 06:02:22.055666 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerName="glance-httpd" containerID="cri-o://b0d408ba7e02fa15ef6bcc304bb0bbc26769dd7735728186d94bc386ebf0abcc" gracePeriod=30 Jan 31 06:02:22 crc kubenswrapper[4712]: I0131 06:02:22.139217 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2118bcf2-8afd-4e35-b53b-6998f5c6a5cc","Type":"ContainerStarted","Data":"36385e3c52dcc41be9e7aab471a7d131083084e811c82f98302219c993770fd1"} Jan 31 06:02:22 crc kubenswrapper[4712]: I0131 06:02:22.158402 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.158381407 podStartE2EDuration="4.158381407s" podCreationTimestamp="2026-01-31 06:02:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:22.156613644 +0000 UTC m=+1408.250495495" watchObservedRunningTime="2026-01-31 06:02:22.158381407 +0000 UTC m=+1408.252263248" Jan 31 06:02:23 crc kubenswrapper[4712]: I0131 06:02:23.150898 4712 generic.go:334] "Generic (PLEG): container finished" podID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerID="53f0ebddd685206db7ac8cf26daaf377aff3e3de846de3284b0198d67695cdeb" exitCode=143 Jan 31 06:02:23 crc kubenswrapper[4712]: I0131 06:02:23.150942 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"4e6b448d-48ff-4e07-bdea-3b5849ceb177","Type":"ContainerDied","Data":"53f0ebddd685206db7ac8cf26daaf377aff3e3de846de3284b0198d67695cdeb"} Jan 31 06:02:23 crc kubenswrapper[4712]: I0131 06:02:23.493882 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 31 06:02:23 crc kubenswrapper[4712]: I0131 06:02:23.538428 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" Jan 31 06:02:23 crc kubenswrapper[4712]: I0131 06:02:23.611388 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f7f54678f-flzvs"] Jan 31 06:02:23 crc kubenswrapper[4712]: I0131 06:02:23.612044 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" podUID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" containerName="dnsmasq-dns" containerID="cri-o://76be602ba041475184cf206243196e743eaa9b5ea468e89ed3dafcfa2b249987" gracePeriod=10 Jan 31 06:02:23 crc kubenswrapper[4712]: I0131 06:02:23.902348 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:02:23 crc kubenswrapper[4712]: I0131 06:02:23.903045 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="003300cf-c4ba-470b-aba7-3c63a426e425" containerName="glance-httpd" containerID="cri-o://05accd9ce82b1c1a73b172f4530252bfe31cc572049b7be75e84fd9d091effe9" gracePeriod=30 Jan 31 06:02:23 crc kubenswrapper[4712]: I0131 06:02:23.902932 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="003300cf-c4ba-470b-aba7-3c63a426e425" containerName="glance-log" containerID="cri-o://be41daf527ed6a6d257b76a18943d18245f49a797a3eda78204b1ab71eeef785" gracePeriod=30 Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.013580 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-wmdr2"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.016646 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wmdr2" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.039375 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wmdr2"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.107423 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-operator-scripts\") pod \"nova-api-db-create-wmdr2\" (UID: \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\") " pod="openstack/nova-api-db-create-wmdr2" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.107666 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfbh6\" (UniqueName: \"kubernetes.io/projected/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-kube-api-access-mfbh6\") pod \"nova-api-db-create-wmdr2\" (UID: \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\") " pod="openstack/nova-api-db-create-wmdr2" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.115125 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-3898-account-create-update-xd6dr"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.116483 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-3898-account-create-update-xd6dr" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.120970 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.134239 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3898-account-create-update-xd6dr"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.187068 4712 generic.go:334] "Generic (PLEG): container finished" podID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" containerID="76be602ba041475184cf206243196e743eaa9b5ea468e89ed3dafcfa2b249987" exitCode=0 Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.187142 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" event={"ID":"bbd78d84-f023-4a93-8bf9-d94dce69f2f7","Type":"ContainerDied","Data":"76be602ba041475184cf206243196e743eaa9b5ea468e89ed3dafcfa2b249987"} Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.191715 4712 generic.go:334] "Generic (PLEG): container finished" podID="003300cf-c4ba-470b-aba7-3c63a426e425" containerID="be41daf527ed6a6d257b76a18943d18245f49a797a3eda78204b1ab71eeef785" exitCode=143 Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.191800 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"003300cf-c4ba-470b-aba7-3c63a426e425","Type":"ContainerDied","Data":"be41daf527ed6a6d257b76a18943d18245f49a797a3eda78204b1ab71eeef785"} Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.212430 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5b506bd-07dd-439e-add3-1bd487999c1a-operator-scripts\") pod \"nova-api-3898-account-create-update-xd6dr\" (UID: \"e5b506bd-07dd-439e-add3-1bd487999c1a\") " pod="openstack/nova-api-3898-account-create-update-xd6dr" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.212528 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9nw8\" (UniqueName: \"kubernetes.io/projected/e5b506bd-07dd-439e-add3-1bd487999c1a-kube-api-access-d9nw8\") pod \"nova-api-3898-account-create-update-xd6dr\" (UID: \"e5b506bd-07dd-439e-add3-1bd487999c1a\") " pod="openstack/nova-api-3898-account-create-update-xd6dr" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.212626 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-operator-scripts\") pod \"nova-api-db-create-wmdr2\" (UID: \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\") " pod="openstack/nova-api-db-create-wmdr2" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.212700 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfbh6\" (UniqueName: \"kubernetes.io/projected/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-kube-api-access-mfbh6\") pod \"nova-api-db-create-wmdr2\" (UID: \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\") " pod="openstack/nova-api-db-create-wmdr2" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.212743 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-hkgvt"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.214210 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-operator-scripts\") pod \"nova-api-db-create-wmdr2\" (UID: \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\") " pod="openstack/nova-api-db-create-wmdr2" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.214283 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-hkgvt" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.237086 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfbh6\" (UniqueName: \"kubernetes.io/projected/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-kube-api-access-mfbh6\") pod \"nova-api-db-create-wmdr2\" (UID: \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\") " pod="openstack/nova-api-db-create-wmdr2" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.241407 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-hkgvt"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.320445 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-844dj\" (UniqueName: \"kubernetes.io/projected/9db4b3a3-a250-4150-8ee3-15d770bd611b-kube-api-access-844dj\") pod \"nova-cell0-db-create-hkgvt\" (UID: \"9db4b3a3-a250-4150-8ee3-15d770bd611b\") " pod="openstack/nova-cell0-db-create-hkgvt" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.320810 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5b506bd-07dd-439e-add3-1bd487999c1a-operator-scripts\") pod \"nova-api-3898-account-create-update-xd6dr\" (UID: \"e5b506bd-07dd-439e-add3-1bd487999c1a\") " pod="openstack/nova-api-3898-account-create-update-xd6dr" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.320846 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db4b3a3-a250-4150-8ee3-15d770bd611b-operator-scripts\") pod \"nova-cell0-db-create-hkgvt\" (UID: \"9db4b3a3-a250-4150-8ee3-15d770bd611b\") " pod="openstack/nova-cell0-db-create-hkgvt" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.321072 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9nw8\" (UniqueName: \"kubernetes.io/projected/e5b506bd-07dd-439e-add3-1bd487999c1a-kube-api-access-d9nw8\") pod \"nova-api-3898-account-create-update-xd6dr\" (UID: \"e5b506bd-07dd-439e-add3-1bd487999c1a\") " pod="openstack/nova-api-3898-account-create-update-xd6dr" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.322687 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5b506bd-07dd-439e-add3-1bd487999c1a-operator-scripts\") pod \"nova-api-3898-account-create-update-xd6dr\" (UID: \"e5b506bd-07dd-439e-add3-1bd487999c1a\") " pod="openstack/nova-api-3898-account-create-update-xd6dr" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.340419 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-xjh9f"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.341259 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wmdr2" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.341627 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-xjh9f" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.360271 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-6ea2-account-create-update-sk5tw"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.361284 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9nw8\" (UniqueName: \"kubernetes.io/projected/e5b506bd-07dd-439e-add3-1bd487999c1a-kube-api-access-d9nw8\") pod \"nova-api-3898-account-create-update-xd6dr\" (UID: \"e5b506bd-07dd-439e-add3-1bd487999c1a\") " pod="openstack/nova-api-3898-account-create-update-xd6dr" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.368869 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.371058 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.374838 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xjh9f"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.408286 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6ea2-account-create-update-sk5tw"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.423190 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db4b3a3-a250-4150-8ee3-15d770bd611b-operator-scripts\") pod \"nova-cell0-db-create-hkgvt\" (UID: \"9db4b3a3-a250-4150-8ee3-15d770bd611b\") " pod="openstack/nova-cell0-db-create-hkgvt" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.423376 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-844dj\" (UniqueName: \"kubernetes.io/projected/9db4b3a3-a250-4150-8ee3-15d770bd611b-kube-api-access-844dj\") pod \"nova-cell0-db-create-hkgvt\" (UID: \"9db4b3a3-a250-4150-8ee3-15d770bd611b\") " pod="openstack/nova-cell0-db-create-hkgvt" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.427571 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db4b3a3-a250-4150-8ee3-15d770bd611b-operator-scripts\") pod \"nova-cell0-db-create-hkgvt\" (UID: \"9db4b3a3-a250-4150-8ee3-15d770bd611b\") " pod="openstack/nova-cell0-db-create-hkgvt" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.443991 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3898-account-create-update-xd6dr" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.453285 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-4029-account-create-update-2kvgn"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.454547 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-4029-account-create-update-2kvgn" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.456795 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.466006 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-844dj\" (UniqueName: \"kubernetes.io/projected/9db4b3a3-a250-4150-8ee3-15d770bd611b-kube-api-access-844dj\") pod \"nova-cell0-db-create-hkgvt\" (UID: \"9db4b3a3-a250-4150-8ee3-15d770bd611b\") " pod="openstack/nova-cell0-db-create-hkgvt" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.468753 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4029-account-create-update-2kvgn"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.527634 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7431f59c-b102-4ea8-b8d4-f1d7f373af85-operator-scripts\") pod \"nova-cell1-6ea2-account-create-update-sk5tw\" (UID: \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\") " pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.528642 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6r7s\" (UniqueName: \"kubernetes.io/projected/d3d1772f-7207-41bf-91bd-8d46663fecc6-kube-api-access-f6r7s\") pod \"nova-cell1-db-create-xjh9f\" (UID: \"d3d1772f-7207-41bf-91bd-8d46663fecc6\") " pod="openstack/nova-cell1-db-create-xjh9f" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.528799 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfr7s\" (UniqueName: \"kubernetes.io/projected/7431f59c-b102-4ea8-b8d4-f1d7f373af85-kube-api-access-dfr7s\") pod \"nova-cell1-6ea2-account-create-update-sk5tw\" (UID: \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\") " pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.529535 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-hkgvt" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.530346 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3d1772f-7207-41bf-91bd-8d46663fecc6-operator-scripts\") pod \"nova-cell1-db-create-xjh9f\" (UID: \"d3d1772f-7207-41bf-91bd-8d46663fecc6\") " pod="openstack/nova-cell1-db-create-xjh9f" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.634064 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcfqt\" (UniqueName: \"kubernetes.io/projected/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-kube-api-access-lcfqt\") pod \"nova-cell0-4029-account-create-update-2kvgn\" (UID: \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\") " pod="openstack/nova-cell0-4029-account-create-update-2kvgn" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.634136 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7431f59c-b102-4ea8-b8d4-f1d7f373af85-operator-scripts\") pod \"nova-cell1-6ea2-account-create-update-sk5tw\" (UID: \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\") " pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.634203 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6r7s\" (UniqueName: \"kubernetes.io/projected/d3d1772f-7207-41bf-91bd-8d46663fecc6-kube-api-access-f6r7s\") pod \"nova-cell1-db-create-xjh9f\" (UID: \"d3d1772f-7207-41bf-91bd-8d46663fecc6\") " pod="openstack/nova-cell1-db-create-xjh9f" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.634223 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfr7s\" (UniqueName: \"kubernetes.io/projected/7431f59c-b102-4ea8-b8d4-f1d7f373af85-kube-api-access-dfr7s\") pod \"nova-cell1-6ea2-account-create-update-sk5tw\" (UID: \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\") " pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.634263 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-operator-scripts\") pod \"nova-cell0-4029-account-create-update-2kvgn\" (UID: \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\") " pod="openstack/nova-cell0-4029-account-create-update-2kvgn" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.634306 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3d1772f-7207-41bf-91bd-8d46663fecc6-operator-scripts\") pod \"nova-cell1-db-create-xjh9f\" (UID: \"d3d1772f-7207-41bf-91bd-8d46663fecc6\") " pod="openstack/nova-cell1-db-create-xjh9f" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.634981 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3d1772f-7207-41bf-91bd-8d46663fecc6-operator-scripts\") pod \"nova-cell1-db-create-xjh9f\" (UID: \"d3d1772f-7207-41bf-91bd-8d46663fecc6\") " pod="openstack/nova-cell1-db-create-xjh9f" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.635602 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/7431f59c-b102-4ea8-b8d4-f1d7f373af85-operator-scripts\") pod \"nova-cell1-6ea2-account-create-update-sk5tw\" (UID: \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\") " pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.663318 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6r7s\" (UniqueName: \"kubernetes.io/projected/d3d1772f-7207-41bf-91bd-8d46663fecc6-kube-api-access-f6r7s\") pod \"nova-cell1-db-create-xjh9f\" (UID: \"d3d1772f-7207-41bf-91bd-8d46663fecc6\") " pod="openstack/nova-cell1-db-create-xjh9f" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.683833 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfr7s\" (UniqueName: \"kubernetes.io/projected/7431f59c-b102-4ea8-b8d4-f1d7f373af85-kube-api-access-dfr7s\") pod \"nova-cell1-6ea2-account-create-update-sk5tw\" (UID: \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\") " pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.728242 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xjh9f" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.739979 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.742148 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcfqt\" (UniqueName: \"kubernetes.io/projected/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-kube-api-access-lcfqt\") pod \"nova-cell0-4029-account-create-update-2kvgn\" (UID: \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\") " pod="openstack/nova-cell0-4029-account-create-update-2kvgn" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.742311 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-operator-scripts\") pod \"nova-cell0-4029-account-create-update-2kvgn\" (UID: \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\") " pod="openstack/nova-cell0-4029-account-create-update-2kvgn" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.743284 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-operator-scripts\") pod \"nova-cell0-4029-account-create-update-2kvgn\" (UID: \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\") " pod="openstack/nova-cell0-4029-account-create-update-2kvgn" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.763370 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcfqt\" (UniqueName: \"kubernetes.io/projected/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-kube-api-access-lcfqt\") pod \"nova-cell0-4029-account-create-update-2kvgn\" (UID: \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\") " pod="openstack/nova-cell0-4029-account-create-update-2kvgn" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.844444 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.844786 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="ceilometer-central-agent" 
containerID="cri-o://04713538ecd0d6f2f98417d580ccc209c6f73c5a5eff465a0ecb7dc62c7009c7" gracePeriod=30 Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.844864 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="proxy-httpd" containerID="cri-o://34f314dc3ff96ebea017b644faeab656aac94ace85cd9c4186f832cabf0debdd" gracePeriod=30 Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.844912 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="sg-core" containerID="cri-o://e9ce21874c22ee4c3a330ef845d7e62f675137fa0c5b9b17121818019cd5cec7" gracePeriod=30 Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.844947 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="ceilometer-notification-agent" containerID="cri-o://b378a9997bde935cc0cd9c6355e147ced72ac937b80d86f1ca07d3fa7274b7c4" gracePeriod=30 Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.858712 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.157:3000/\": EOF" Jan 31 06:02:24 crc kubenswrapper[4712]: I0131 06:02:24.863698 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4029-account-create-update-2kvgn" Jan 31 06:02:25 crc kubenswrapper[4712]: I0131 06:02:25.206022 4712 generic.go:334] "Generic (PLEG): container finished" podID="48089928-0ffc-4d25-adf9-57f7874477c2" containerID="e9ce21874c22ee4c3a330ef845d7e62f675137fa0c5b9b17121818019cd5cec7" exitCode=2 Jan 31 06:02:25 crc kubenswrapper[4712]: I0131 06:02:25.206082 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerDied","Data":"e9ce21874c22ee4c3a330ef845d7e62f675137fa0c5b9b17121818019cd5cec7"} Jan 31 06:02:26 crc kubenswrapper[4712]: I0131 06:02:26.223937 4712 generic.go:334] "Generic (PLEG): container finished" podID="48089928-0ffc-4d25-adf9-57f7874477c2" containerID="34f314dc3ff96ebea017b644faeab656aac94ace85cd9c4186f832cabf0debdd" exitCode=0 Jan 31 06:02:26 crc kubenswrapper[4712]: I0131 06:02:26.224484 4712 generic.go:334] "Generic (PLEG): container finished" podID="48089928-0ffc-4d25-adf9-57f7874477c2" containerID="04713538ecd0d6f2f98417d580ccc209c6f73c5a5eff465a0ecb7dc62c7009c7" exitCode=0 Jan 31 06:02:26 crc kubenswrapper[4712]: I0131 06:02:26.224019 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerDied","Data":"34f314dc3ff96ebea017b644faeab656aac94ace85cd9c4186f832cabf0debdd"} Jan 31 06:02:26 crc kubenswrapper[4712]: I0131 06:02:26.224574 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerDied","Data":"04713538ecd0d6f2f98417d580ccc209c6f73c5a5eff465a0ecb7dc62c7009c7"} Jan 31 06:02:26 crc kubenswrapper[4712]: I0131 06:02:26.226838 4712 generic.go:334] "Generic (PLEG): container finished" podID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerID="b0d408ba7e02fa15ef6bcc304bb0bbc26769dd7735728186d94bc386ebf0abcc" exitCode=0 Jan 
31 06:02:26 crc kubenswrapper[4712]: I0131 06:02:26.226874 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4e6b448d-48ff-4e07-bdea-3b5849ceb177","Type":"ContainerDied","Data":"b0d408ba7e02fa15ef6bcc304bb0bbc26769dd7735728186d94bc386ebf0abcc"} Jan 31 06:02:26 crc kubenswrapper[4712]: I0131 06:02:26.952343 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.157:3000/\": dial tcp 10.217.0.157:3000: connect: connection refused" Jan 31 06:02:27 crc kubenswrapper[4712]: I0131 06:02:27.239513 4712 generic.go:334] "Generic (PLEG): container finished" podID="003300cf-c4ba-470b-aba7-3c63a426e425" containerID="05accd9ce82b1c1a73b172f4530252bfe31cc572049b7be75e84fd9d091effe9" exitCode=0 Jan 31 06:02:27 crc kubenswrapper[4712]: I0131 06:02:27.239571 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"003300cf-c4ba-470b-aba7-3c63a426e425","Type":"ContainerDied","Data":"05accd9ce82b1c1a73b172f4530252bfe31cc572049b7be75e84fd9d091effe9"} Jan 31 06:02:27 crc kubenswrapper[4712]: I0131 06:02:27.345116 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" podUID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.159:5353: connect: connection refused" Jan 31 06:02:28 crc kubenswrapper[4712]: I0131 06:02:28.251941 4712 generic.go:334] "Generic (PLEG): container finished" podID="48089928-0ffc-4d25-adf9-57f7874477c2" containerID="b378a9997bde935cc0cd9c6355e147ced72ac937b80d86f1ca07d3fa7274b7c4" exitCode=0 Jan 31 06:02:28 crc kubenswrapper[4712]: I0131 06:02:28.251986 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerDied","Data":"b378a9997bde935cc0cd9c6355e147ced72ac937b80d86f1ca07d3fa7274b7c4"} Jan 31 06:02:28 crc kubenswrapper[4712]: I0131 06:02:28.922722 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 31 06:02:31 crc kubenswrapper[4712]: E0131 06:02:31.179240 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-openstackclient:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 06:02:31 crc kubenswrapper[4712]: E0131 06:02:31.179962 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-openstackclient:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 06:02:31 crc kubenswrapper[4712]: E0131 06:02:31.180115 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:openstackclient,Image:quay.rdoproject.org/podified-master-centos9/openstack-openstackclient:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n99h646hc7h55fh7h576h688h5ddh94h86h55dh5bch57bh4h569h546h57chb8h589h597h679h7bhbfh545h5c4h59fhb8h547h654hf5h5b7h574q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4s5zp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(5403021d-e6d4-4e4e-aa8e-8879f65f9f36): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 06:02:31 crc kubenswrapper[4712]: E0131 06:02:31.181538 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="5403021d-e6d4-4e4e-aa8e-8879f65f9f36" Jan 31 06:02:31 crc kubenswrapper[4712]: I0131 06:02:31.282575 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8557fb7df9-bhzkt" event={"ID":"2a8507c3-4b91-4b81-83ba-4bb63b3745f0","Type":"ContainerStarted","Data":"92911f97c8c9bff5b0b0a90d57bc4cbdd10e49a21540d9fc5a01ad5956adb704"} Jan 31 06:02:31 crc kubenswrapper[4712]: E0131 06:02:31.298224 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-openstackclient:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/openstackclient" podUID="5403021d-e6d4-4e4e-aa8e-8879f65f9f36" Jan 31 06:02:32 crc 
kubenswrapper[4712]: I0131 06:02:32.264878 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.325850 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" event={"ID":"bbd78d84-f023-4a93-8bf9-d94dce69f2f7","Type":"ContainerDied","Data":"911698e0ba603fc4e69f1721baf912c7c57b10de0e6b9cf6a862162e9e8dff6f"} Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.325918 4712 scope.go:117] "RemoveContainer" containerID="76be602ba041475184cf206243196e743eaa9b5ea468e89ed3dafcfa2b249987" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.326085 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f7f54678f-flzvs" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.348348 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8557fb7df9-bhzkt" event={"ID":"2a8507c3-4b91-4b81-83ba-4bb63b3745f0","Type":"ContainerStarted","Data":"caf33f87eb6a90071c2e690ed89887df76916a6a506c0a1a03ef28f697dd9559"} Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.401597 4712 scope.go:117] "RemoveContainer" containerID="01d085cea9ad88faeffb1fdf8298f7d1944e6f98b982af0c301e9968c281627f" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.437900 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-sb\") pod \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.438003 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-config\") pod \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.438099 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-swift-storage-0\") pod \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.438272 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-svc\") pod \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.438366 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-nb\") pod \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.438415 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsrqr\" (UniqueName: \"kubernetes.io/projected/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-kube-api-access-lsrqr\") pod \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\" (UID: \"bbd78d84-f023-4a93-8bf9-d94dce69f2f7\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.449542 4712 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-kube-api-access-lsrqr" (OuterVolumeSpecName: "kube-api-access-lsrqr") pod "bbd78d84-f023-4a93-8bf9-d94dce69f2f7" (UID: "bbd78d84-f023-4a93-8bf9-d94dce69f2f7"). InnerVolumeSpecName "kube-api-access-lsrqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.470426 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3898-account-create-update-xd6dr"] Jan 31 06:02:32 crc kubenswrapper[4712]: W0131 06:02:32.511251 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5b506bd_07dd_439e_add3_1bd487999c1a.slice/crio-6ed3f897400f0374cb093c78786bbd11eb209ac999805e46b7ce994b642b9da5 WatchSource:0}: Error finding container 6ed3f897400f0374cb093c78786bbd11eb209ac999805e46b7ce994b642b9da5: Status 404 returned error can't find the container with id 6ed3f897400f0374cb093c78786bbd11eb209ac999805e46b7ce994b642b9da5 Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.543690 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsrqr\" (UniqueName: \"kubernetes.io/projected/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-kube-api-access-lsrqr\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.658012 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bbd78d84-f023-4a93-8bf9-d94dce69f2f7" (UID: "bbd78d84-f023-4a93-8bf9-d94dce69f2f7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.665702 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bbd78d84-f023-4a93-8bf9-d94dce69f2f7" (UID: "bbd78d84-f023-4a93-8bf9-d94dce69f2f7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.688506 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-config" (OuterVolumeSpecName: "config") pod "bbd78d84-f023-4a93-8bf9-d94dce69f2f7" (UID: "bbd78d84-f023-4a93-8bf9-d94dce69f2f7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.690284 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bbd78d84-f023-4a93-8bf9-d94dce69f2f7" (UID: "bbd78d84-f023-4a93-8bf9-d94dce69f2f7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.730666 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bbd78d84-f023-4a93-8bf9-d94dce69f2f7" (UID: "bbd78d84-f023-4a93-8bf9-d94dce69f2f7"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.771039 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.771069 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.771080 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.771091 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.771103 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bbd78d84-f023-4a93-8bf9-d94dce69f2f7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.818766 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.849141 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.876844 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-combined-ca-bundle\") pod \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.876959 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.877044 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6h562\" (UniqueName: \"kubernetes.io/projected/4e6b448d-48ff-4e07-bdea-3b5849ceb177-kube-api-access-6h562\") pod \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.877069 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-httpd-run\") pod \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.877097 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-config-data\") pod \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.877151 
4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-logs\") pod \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.877268 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-public-tls-certs\") pod \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.877311 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-scripts\") pod \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\" (UID: \"4e6b448d-48ff-4e07-bdea-3b5849ceb177\") " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.878426 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4e6b448d-48ff-4e07-bdea-3b5849ceb177" (UID: "4e6b448d-48ff-4e07-bdea-3b5849ceb177"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.879295 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-logs" (OuterVolumeSpecName: "logs") pod "4e6b448d-48ff-4e07-bdea-3b5849ceb177" (UID: "4e6b448d-48ff-4e07-bdea-3b5849ceb177"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.886807 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e6b448d-48ff-4e07-bdea-3b5849ceb177-kube-api-access-6h562" (OuterVolumeSpecName: "kube-api-access-6h562") pod "4e6b448d-48ff-4e07-bdea-3b5849ceb177" (UID: "4e6b448d-48ff-4e07-bdea-3b5849ceb177"). InnerVolumeSpecName "kube-api-access-6h562". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.887742 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "4e6b448d-48ff-4e07-bdea-3b5849ceb177" (UID: "4e6b448d-48ff-4e07-bdea-3b5849ceb177"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.888264 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-scripts" (OuterVolumeSpecName: "scripts") pod "4e6b448d-48ff-4e07-bdea-3b5849ceb177" (UID: "4e6b448d-48ff-4e07-bdea-3b5849ceb177"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.908296 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.995786 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.996224 4712 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.996242 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6h562\" (UniqueName: \"kubernetes.io/projected/4e6b448d-48ff-4e07-bdea-3b5849ceb177-kube-api-access-6h562\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.996253 4712 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:32 crc kubenswrapper[4712]: I0131 06:02:32.996262 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e6b448d-48ff-4e07-bdea-3b5849ceb177-logs\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.016347 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xjh9f"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.068402 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6ea2-account-create-update-sk5tw"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.085432 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-hkgvt"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099339 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-log-httpd\") pod \"48089928-0ffc-4d25-adf9-57f7874477c2\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099417 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-scripts\") pod \"48089928-0ffc-4d25-adf9-57f7874477c2\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099447 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"003300cf-c4ba-470b-aba7-3c63a426e425\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099593 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-run-httpd\") pod \"48089928-0ffc-4d25-adf9-57f7874477c2\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099637 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-logs\") pod \"003300cf-c4ba-470b-aba7-3c63a426e425\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " Jan 31 06:02:33 crc 
kubenswrapper[4712]: I0131 06:02:33.099683 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6l6t7\" (UniqueName: \"kubernetes.io/projected/48089928-0ffc-4d25-adf9-57f7874477c2-kube-api-access-6l6t7\") pod \"48089928-0ffc-4d25-adf9-57f7874477c2\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099704 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-config-data\") pod \"48089928-0ffc-4d25-adf9-57f7874477c2\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099740 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-combined-ca-bundle\") pod \"003300cf-c4ba-470b-aba7-3c63a426e425\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099765 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljznw\" (UniqueName: \"kubernetes.io/projected/003300cf-c4ba-470b-aba7-3c63a426e425-kube-api-access-ljznw\") pod \"003300cf-c4ba-470b-aba7-3c63a426e425\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099798 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-config-data\") pod \"003300cf-c4ba-470b-aba7-3c63a426e425\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099823 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-scripts\") pod \"003300cf-c4ba-470b-aba7-3c63a426e425\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099848 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-internal-tls-certs\") pod \"003300cf-c4ba-470b-aba7-3c63a426e425\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099948 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-combined-ca-bundle\") pod \"48089928-0ffc-4d25-adf9-57f7874477c2\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.099976 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-sg-core-conf-yaml\") pod \"48089928-0ffc-4d25-adf9-57f7874477c2\" (UID: \"48089928-0ffc-4d25-adf9-57f7874477c2\") " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.100024 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-httpd-run\") pod \"003300cf-c4ba-470b-aba7-3c63a426e425\" (UID: \"003300cf-c4ba-470b-aba7-3c63a426e425\") " Jan 31 06:02:33 crc 
kubenswrapper[4712]: I0131 06:02:33.100930 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "003300cf-c4ba-470b-aba7-3c63a426e425" (UID: "003300cf-c4ba-470b-aba7-3c63a426e425"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.102483 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "48089928-0ffc-4d25-adf9-57f7874477c2" (UID: "48089928-0ffc-4d25-adf9-57f7874477c2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.107114 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "48089928-0ffc-4d25-adf9-57f7874477c2" (UID: "48089928-0ffc-4d25-adf9-57f7874477c2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.107434 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-logs" (OuterVolumeSpecName: "logs") pod "003300cf-c4ba-470b-aba7-3c63a426e425" (UID: "003300cf-c4ba-470b-aba7-3c63a426e425"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.109501 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wmdr2"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.114551 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-scripts" (OuterVolumeSpecName: "scripts") pod "48089928-0ffc-4d25-adf9-57f7874477c2" (UID: "48089928-0ffc-4d25-adf9-57f7874477c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.126068 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4029-account-create-update-2kvgn"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.129023 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48089928-0ffc-4d25-adf9-57f7874477c2-kube-api-access-6l6t7" (OuterVolumeSpecName: "kube-api-access-6l6t7") pod "48089928-0ffc-4d25-adf9-57f7874477c2" (UID: "48089928-0ffc-4d25-adf9-57f7874477c2"). InnerVolumeSpecName "kube-api-access-6l6t7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.140720 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "003300cf-c4ba-470b-aba7-3c63a426e425" (UID: "003300cf-c4ba-470b-aba7-3c63a426e425"). InnerVolumeSpecName "local-storage03-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.151066 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-scripts" (OuterVolumeSpecName: "scripts") pod "003300cf-c4ba-470b-aba7-3c63a426e425" (UID: "003300cf-c4ba-470b-aba7-3c63a426e425"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.158561 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/003300cf-c4ba-470b-aba7-3c63a426e425-kube-api-access-ljznw" (OuterVolumeSpecName: "kube-api-access-ljznw") pod "003300cf-c4ba-470b-aba7-3c63a426e425" (UID: "003300cf-c4ba-470b-aba7-3c63a426e425"). InnerVolumeSpecName "kube-api-access-ljznw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.161760 4712 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.195939 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e6b448d-48ff-4e07-bdea-3b5849ceb177" (UID: "4e6b448d-48ff-4e07-bdea-3b5849ceb177"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.209620 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f7f54678f-flzvs"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210447 4712 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210481 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-logs\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210491 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210500 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6l6t7\" (UniqueName: \"kubernetes.io/projected/48089928-0ffc-4d25-adf9-57f7874477c2-kube-api-access-6l6t7\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210511 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljznw\" (UniqueName: \"kubernetes.io/projected/003300cf-c4ba-470b-aba7-3c63a426e425-kube-api-access-ljznw\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210519 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210529 4712 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210538 4712 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/003300cf-c4ba-470b-aba7-3c63a426e425-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210554 4712 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48089928-0ffc-4d25-adf9-57f7874477c2-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210562 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.210586 4712 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.212498 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4e6b448d-48ff-4e07-bdea-3b5849ceb177" (UID: "4e6b448d-48ff-4e07-bdea-3b5849ceb177"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.243771 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f7f54678f-flzvs"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.267920 4712 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.283359 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-config-data" (OuterVolumeSpecName: "config-data") pod "003300cf-c4ba-470b-aba7-3c63a426e425" (UID: "003300cf-c4ba-470b-aba7-3c63a426e425"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.306708 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-config-data" (OuterVolumeSpecName: "config-data") pod "4e6b448d-48ff-4e07-bdea-3b5849ceb177" (UID: "4e6b448d-48ff-4e07-bdea-3b5849ceb177"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.313433 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "48089928-0ffc-4d25-adf9-57f7874477c2" (UID: "48089928-0ffc-4d25-adf9-57f7874477c2"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.313106 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.314264 4712 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.314406 4712 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.322467 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "003300cf-c4ba-470b-aba7-3c63a426e425" (UID: "003300cf-c4ba-470b-aba7-3c63a426e425"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.360091 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "003300cf-c4ba-470b-aba7-3c63a426e425" (UID: "003300cf-c4ba-470b-aba7-3c63a426e425"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.400039 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" event={"ID":"7431f59c-b102-4ea8-b8d4-f1d7f373af85","Type":"ContainerStarted","Data":"fc3264644a72bde4bf5964dac9bada3932c6a1f603a85cc0c9d6c690de2bb9e7"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.410779 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4029-account-create-update-2kvgn" event={"ID":"653c7ebd-bdbe-4ef2-910c-67ab033d8aad","Type":"ContainerStarted","Data":"a5df6fd2b2957b666ff9ecbbe4465606248add61bb0a627b347e272b3d9f6917"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.416665 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.416694 4712 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/003300cf-c4ba-470b-aba7-3c63a426e425-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.416707 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6b448d-48ff-4e07-bdea-3b5849ceb177-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.416715 4712 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.418989 4712 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4e6b448d-48ff-4e07-bdea-3b5849ceb177","Type":"ContainerDied","Data":"59804cd2914b42851c0d71a33d8ddbb2339b5fceebb841574929b74f4b280024"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.419070 4712 scope.go:117] "RemoveContainer" containerID="b0d408ba7e02fa15ef6bcc304bb0bbc26769dd7735728186d94bc386ebf0abcc" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.419350 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.451567 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"48089928-0ffc-4d25-adf9-57f7874477c2","Type":"ContainerDied","Data":"6a86fb333d3686c641ae305e44e580b8eba500ad1b46cd8f6a115e745657c580"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.451612 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.457466 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-hkgvt" event={"ID":"9db4b3a3-a250-4150-8ee3-15d770bd611b","Type":"ContainerStarted","Data":"c430c42e9d3875131d06ec097cdf563ef51e527ca12b6db7199ef749cd008775"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.459599 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3898-account-create-update-xd6dr" event={"ID":"e5b506bd-07dd-439e-add3-1bd487999c1a","Type":"ContainerStarted","Data":"e63b6d7a57ec4cfe71ab4b2e5eafea9f045af81c5b18f77ed541fbef1cb94cd3"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.459657 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3898-account-create-update-xd6dr" event={"ID":"e5b506bd-07dd-439e-add3-1bd487999c1a","Type":"ContainerStarted","Data":"6ed3f897400f0374cb093c78786bbd11eb209ac999805e46b7ce994b642b9da5"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.462414 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wmdr2" event={"ID":"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d","Type":"ContainerStarted","Data":"dc0cf38254ed10529ff533b4e2f800fed7c2011568b38a1871746395bdc83411"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.466485 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"003300cf-c4ba-470b-aba7-3c63a426e425","Type":"ContainerDied","Data":"920a2bdd98eea37a3b68bda43bda91b63c5925f1d2c1424a1855d2b7a3d2cd95"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.466599 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.477993 4712 scope.go:117] "RemoveContainer" containerID="53f0ebddd685206db7ac8cf26daaf377aff3e3de846de3284b0198d67695cdeb" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.486632 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-3898-account-create-update-xd6dr" podStartSLOduration=9.486610646 podStartE2EDuration="9.486610646s" podCreationTimestamp="2026-01-31 06:02:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:33.478932887 +0000 UTC m=+1419.572814728" watchObservedRunningTime="2026-01-31 06:02:33.486610646 +0000 UTC m=+1419.580492487" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.497565 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8557fb7df9-bhzkt" event={"ID":"2a8507c3-4b91-4b81-83ba-4bb63b3745f0","Type":"ContainerStarted","Data":"9791677501a659879c757f510982b3dd511a4a1d9d9b527e7896cc68a9b7a148"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.498237 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.500075 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.503610 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-config-data" (OuterVolumeSpecName: "config-data") pod "48089928-0ffc-4d25-adf9-57f7874477c2" (UID: "48089928-0ffc-4d25-adf9-57f7874477c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.505470 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48089928-0ffc-4d25-adf9-57f7874477c2" (UID: "48089928-0ffc-4d25-adf9-57f7874477c2"). InnerVolumeSpecName "combined-ca-bundle". 
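
The "SyncLoop (PLEG): event for pod" entries above show pod lifecycle events being fed into the sync loop: each carries a pod UID, an event type such as ContainerStarted or ContainerDied, and a container or sandbox ID in Data. A minimal sketch of how such events could be dispatched, with hypothetical names (plegEvent, dispatch) rather than the kubelet's real types:

```go
package main

import "fmt"

// plegEvent mirrors the fields the log prints for each event: the
// pod UID, an event type, and a container or sandbox ID in Data.
type plegEvent struct {
	ID, Type, Data string
}

// dispatch is a hypothetical version of the sync-loop reaction:
// ContainerStarted triggers a status sync, while ContainerDied also
// makes the dead container a removal candidate, matching the
// "RemoveContainer" entries that follow the ContainerDied events.
func dispatch(ev plegEvent) {
	switch ev.Type {
	case "ContainerStarted":
		fmt.Printf("pod %s: container %s running, sync status\n", ev.ID, ev.Data)
	case "ContainerDied":
		fmt.Printf("pod %s: container %s exited, sync status and schedule RemoveContainer\n", ev.ID, ev.Data)
	default:
		fmt.Printf("pod %s: ignoring event %s\n", ev.ID, ev.Type)
	}
}

func main() {
	dispatch(plegEvent{ID: "pod-a", Type: "ContainerStarted", Data: "container-1"})
	dispatch(plegEvent{ID: "pod-b", Type: "ContainerDied", Data: "container-2"})
}
```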
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.518593 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xjh9f" event={"ID":"d3d1772f-7207-41bf-91bd-8d46663fecc6","Type":"ContainerStarted","Data":"809331eed1bf52de1f47fd2f3db2159b4ca42a076117464da38898d11be53dc3"} Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.518815 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.518856 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48089928-0ffc-4d25-adf9-57f7874477c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.543703 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-8557fb7df9-bhzkt" podStartSLOduration=13.543682304 podStartE2EDuration="13.543682304s" podCreationTimestamp="2026-01-31 06:02:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:33.541142792 +0000 UTC m=+1419.635024633" watchObservedRunningTime="2026-01-31 06:02:33.543682304 +0000 UTC m=+1419.637564145" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.568438 4712 scope.go:117] "RemoveContainer" containerID="34f314dc3ff96ebea017b644faeab656aac94ace85cd9c4186f832cabf0debdd" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.615511 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.656030 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.666060 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687000 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: E0131 06:02:33.687684 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" containerName="dnsmasq-dns" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687711 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" containerName="dnsmasq-dns" Jan 31 06:02:33 crc kubenswrapper[4712]: E0131 06:02:33.687733 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="ceilometer-notification-agent" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687742 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="ceilometer-notification-agent" Jan 31 06:02:33 crc kubenswrapper[4712]: E0131 06:02:33.687756 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="003300cf-c4ba-470b-aba7-3c63a426e425" containerName="glance-httpd" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687765 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="003300cf-c4ba-470b-aba7-3c63a426e425" containerName="glance-httpd" Jan 31 06:02:33 crc kubenswrapper[4712]: 
E0131 06:02:33.687780 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="ceilometer-central-agent" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687787 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="ceilometer-central-agent" Jan 31 06:02:33 crc kubenswrapper[4712]: E0131 06:02:33.687800 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerName="glance-httpd" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687810 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerName="glance-httpd" Jan 31 06:02:33 crc kubenswrapper[4712]: E0131 06:02:33.687823 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" containerName="init" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687830 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" containerName="init" Jan 31 06:02:33 crc kubenswrapper[4712]: E0131 06:02:33.687844 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerName="glance-log" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687852 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerName="glance-log" Jan 31 06:02:33 crc kubenswrapper[4712]: E0131 06:02:33.687877 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="003300cf-c4ba-470b-aba7-3c63a426e425" containerName="glance-log" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687885 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="003300cf-c4ba-470b-aba7-3c63a426e425" containerName="glance-log" Jan 31 06:02:33 crc kubenswrapper[4712]: E0131 06:02:33.687896 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="sg-core" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687903 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="sg-core" Jan 31 06:02:33 crc kubenswrapper[4712]: E0131 06:02:33.687928 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="proxy-httpd" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.687935 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="proxy-httpd" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.688152 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="003300cf-c4ba-470b-aba7-3c63a426e425" containerName="glance-log" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.688199 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="sg-core" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.688217 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="ceilometer-central-agent" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.688231 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" containerName="dnsmasq-dns" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.688240 
4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="003300cf-c4ba-470b-aba7-3c63a426e425" containerName="glance-httpd" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.688252 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerName="glance-httpd" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.688265 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" containerName="glance-log" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.688275 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="proxy-httpd" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.688290 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" containerName="ceilometer-notification-agent" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.689669 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.692019 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.692737 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.692780 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.693668 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hlkbc" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.732243 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.746245 4712 scope.go:117] "RemoveContainer" containerID="e9ce21874c22ee4c3a330ef845d7e62f675137fa0c5b9b17121818019cd5cec7" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.746346 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.758856 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.760747 4712 util.go:30] "No sandbox for pod can be found. 
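
The cpu_manager.go:410, state_mem.go:107, and memory_manager.go:354 run above shows the resource managers dropping per-container CPU and memory assignments that belong to pods which no longer exist, before the replacement pods are admitted. A minimal sketch of that cleanup pattern over a map keyed by pod UID and container name (hypothetical names, not the kubelet's state API):

```go
package main

import "fmt"

// assignmentKey identifies an assignment the way the log prints it:
// pod UID plus container name.
type assignmentKey struct {
	podUID        string
	containerName string
}

// removeStaleState deletes every assignment whose pod is no longer
// active, analogous to the "RemoveStaleState: removing container"
// pass; deleting from a map while ranging over it is safe in Go.
func removeStaleState(assignments map[assignmentKey]string, activePods map[string]bool) {
	for key := range assignments {
		if !activePods[key.podUID] {
			fmt.Printf("RemoveStaleState: removing container %q of pod %q\n", key.containerName, key.podUID)
			delete(assignments, key)
		}
	}
}

func main() {
	assignments := map[assignmentKey]string{
		{"48089928-0ffc-4d25-adf9-57f7874477c2", "sg-core"}:     "CPUs 0-1",
		{"bbd78d84-f023-4a93-8bf9-d94dce69f2f7", "dnsmasq-dns"}: "CPUs 2",
	}
	removeStaleState(assignments, map[string]bool{}) // neither pod is active any more
	fmt.Println("remaining assignments:", len(assignments)) // 0
}
```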
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.764293 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.764463 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.768974 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.786754 4712 scope.go:117] "RemoveContainer" containerID="b378a9997bde935cc0cd9c6355e147ced72ac937b80d86f1ca07d3fa7274b7c4" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.814408 4712 scope.go:117] "RemoveContainer" containerID="04713538ecd0d6f2f98417d580ccc209c6f73c5a5eff465a0ecb7dc62c7009c7" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.824483 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.824535 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-logs\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.824576 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgl4v\" (UniqueName: \"kubernetes.io/projected/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-kube-api-access-sgl4v\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.824625 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.824708 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.824740 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.824766 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.824782 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.840871 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.844742 4712 scope.go:117] "RemoveContainer" containerID="05accd9ce82b1c1a73b172f4530252bfe31cc572049b7be75e84fd9d091effe9" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.854456 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.870249 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.873309 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.878344 4712 scope.go:117] "RemoveContainer" containerID="be41daf527ed6a6d257b76a18943d18245f49a797a3eda78204b1ab71eeef785" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.878815 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.878860 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.888324 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930266 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930365 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-scripts\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930408 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b83454be-e489-429c-a4b3-8914ee18daa4-logs\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930437 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930471 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930501 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930574 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930689 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930729 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930781 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-logs\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930890 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgl4v\" (UniqueName: \"kubernetes.io/projected/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-kube-api-access-sgl4v\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.930981 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b83454be-e489-429c-a4b3-8914ee18daa4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.931061 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.931123 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.931191 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-config-data\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.931270 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4kxd\" (UniqueName: \"kubernetes.io/projected/b83454be-e489-429c-a4b3-8914ee18daa4-kube-api-access-w4kxd\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.932913 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.933187 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.935155 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-logs\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.941622 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.950985 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.965824 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " 
pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.969879 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:33 crc kubenswrapper[4712]: I0131 06:02:33.971798 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgl4v\" (UniqueName: \"kubernetes.io/projected/baae8e5b-9153-449a-92f2-34eb6cb7dbd3-kube-api-access-sgl4v\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033383 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033437 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-config-data\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033470 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr6lp\" (UniqueName: \"kubernetes.io/projected/92f3c0de-8b85-4430-99d6-b7884720e31b-kube-api-access-vr6lp\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033496 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4kxd\" (UniqueName: \"kubernetes.io/projected/b83454be-e489-429c-a4b3-8914ee18daa4-kube-api-access-w4kxd\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033551 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-scripts\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033572 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b83454be-e489-429c-a4b3-8914ee18daa4-logs\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033610 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 
06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033639 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-run-httpd\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033659 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033687 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033720 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-log-httpd\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033740 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-config-data\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033756 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033777 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-scripts\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.033805 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b83454be-e489-429c-a4b3-8914ee18daa4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.034259 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b83454be-e489-429c-a4b3-8914ee18daa4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.039070 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.040399 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.040725 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b83454be-e489-429c-a4b3-8914ee18daa4-logs\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.043085 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"baae8e5b-9153-449a-92f2-34eb6cb7dbd3\") " pod="openstack/glance-default-internal-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.046355 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.047325 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-config-data\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.048248 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b83454be-e489-429c-a4b3-8914ee18daa4-scripts\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.050187 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.065049 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4kxd\" (UniqueName: \"kubernetes.io/projected/b83454be-e489-429c-a4b3-8914ee18daa4-kube-api-access-w4kxd\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.094091 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"b83454be-e489-429c-a4b3-8914ee18daa4\") " pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.137579 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-log-httpd\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.137639 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-config-data\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.137656 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.137683 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-scripts\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.137749 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr6lp\" (UniqueName: \"kubernetes.io/projected/92f3c0de-8b85-4430-99d6-b7884720e31b-kube-api-access-vr6lp\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.137833 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-run-httpd\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.137864 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.139164 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-log-httpd\") pod 
\"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.139395 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-run-httpd\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.145756 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-scripts\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.146778 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-config-data\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.148139 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.151296 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.159009 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr6lp\" (UniqueName: \"kubernetes.io/projected/92f3c0de-8b85-4430-99d6-b7884720e31b-kube-api-access-vr6lp\") pod \"ceilometer-0\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.205565 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.386665 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.523568 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="003300cf-c4ba-470b-aba7-3c63a426e425" path="/var/lib/kubelet/pods/003300cf-c4ba-470b-aba7-3c63a426e425/volumes" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.524476 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48089928-0ffc-4d25-adf9-57f7874477c2" path="/var/lib/kubelet/pods/48089928-0ffc-4d25-adf9-57f7874477c2/volumes" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.531571 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e6b448d-48ff-4e07-bdea-3b5849ceb177" path="/var/lib/kubelet/pods/4e6b448d-48ff-4e07-bdea-3b5849ceb177/volumes" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.532869 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbd78d84-f023-4a93-8bf9-d94dce69f2f7" path="/var/lib/kubelet/pods/bbd78d84-f023-4a93-8bf9-d94dce69f2f7/volumes" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.540066 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-hkgvt" event={"ID":"9db4b3a3-a250-4150-8ee3-15d770bd611b","Type":"ContainerStarted","Data":"caf65a3c5396b3e688e58ab1c721f8117eaf4313f391c99fe95e9b27a818229d"} Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.548197 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xjh9f" event={"ID":"d3d1772f-7207-41bf-91bd-8d46663fecc6","Type":"ContainerStarted","Data":"e74f9b430434fb24908ea766dd96c0d1ac4614ccd088c89f01542fc4ed13e8f0"} Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.552548 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" event={"ID":"7431f59c-b102-4ea8-b8d4-f1d7f373af85","Type":"ContainerStarted","Data":"b9aba50b03f7db3a75e805a701552877dc0d8050d83b04d7ba13b5759bb8593a"} Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.566716 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4029-account-create-update-2kvgn" event={"ID":"653c7ebd-bdbe-4ef2-910c-67ab033d8aad","Type":"ContainerStarted","Data":"242851c1a8694e51fd08ab74c666374d23d21aeb558c54f66421a86aacf72e83"} Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.573893 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wmdr2" event={"ID":"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d","Type":"ContainerStarted","Data":"99fa90bfc3b624390710250bf81efaec1774c15ef3534b5b8299005dbccc4f26"} Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.629298 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-4029-account-create-update-2kvgn" podStartSLOduration=10.629277551 podStartE2EDuration="10.629277551s" podCreationTimestamp="2026-01-31 06:02:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:34.613438752 +0000 UTC m=+1420.707320593" watchObservedRunningTime="2026-01-31 06:02:34.629277551 +0000 UTC m=+1420.723159392" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.645508 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-hkgvt" podStartSLOduration=10.645482557 podStartE2EDuration="10.645482557s" podCreationTimestamp="2026-01-31 06:02:24 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:34.634075518 +0000 UTC m=+1420.727957359" watchObservedRunningTime="2026-01-31 06:02:34.645482557 +0000 UTC m=+1420.739364398" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.663659 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-xjh9f" podStartSLOduration=10.663635802 podStartE2EDuration="10.663635802s" podCreationTimestamp="2026-01-31 06:02:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:34.650569742 +0000 UTC m=+1420.744451583" watchObservedRunningTime="2026-01-31 06:02:34.663635802 +0000 UTC m=+1420.757517643" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.687284 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" podStartSLOduration=10.687259071 podStartE2EDuration="10.687259071s" podCreationTimestamp="2026-01-31 06:02:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:34.678940277 +0000 UTC m=+1420.772822118" watchObservedRunningTime="2026-01-31 06:02:34.687259071 +0000 UTC m=+1420.781140912" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.702955 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-wmdr2" podStartSLOduration=11.702931705 podStartE2EDuration="11.702931705s" podCreationTimestamp="2026-01-31 06:02:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:34.694111159 +0000 UTC m=+1420.787993000" watchObservedRunningTime="2026-01-31 06:02:34.702931705 +0000 UTC m=+1420.796813546" Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.807901 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.913091 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:34 crc kubenswrapper[4712]: I0131 06:02:34.968567 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.217490 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.601078 4712 generic.go:334] "Generic (PLEG): container finished" podID="7431f59c-b102-4ea8-b8d4-f1d7f373af85" containerID="b9aba50b03f7db3a75e805a701552877dc0d8050d83b04d7ba13b5759bb8593a" exitCode=0 Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.601932 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" event={"ID":"7431f59c-b102-4ea8-b8d4-f1d7f373af85","Type":"ContainerDied","Data":"b9aba50b03f7db3a75e805a701552877dc0d8050d83b04d7ba13b5759bb8593a"} Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.607730 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"baae8e5b-9153-449a-92f2-34eb6cb7dbd3","Type":"ContainerStarted","Data":"338b4b29c3c371a1d6a46717f5830e8eacb5dd59aa3bd86c5b72209a107b467e"} Jan 31 
06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.620395 4712 generic.go:334] "Generic (PLEG): container finished" podID="9db4b3a3-a250-4150-8ee3-15d770bd611b" containerID="caf65a3c5396b3e688e58ab1c721f8117eaf4313f391c99fe95e9b27a818229d" exitCode=0 Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.620512 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-hkgvt" event={"ID":"9db4b3a3-a250-4150-8ee3-15d770bd611b","Type":"ContainerDied","Data":"caf65a3c5396b3e688e58ab1c721f8117eaf4313f391c99fe95e9b27a818229d"} Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.627363 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b83454be-e489-429c-a4b3-8914ee18daa4","Type":"ContainerStarted","Data":"dbd07f1fb5d3a21245a62aec1e314f3d23a612e4a3067e31e5503f70392c3582"} Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.634455 4712 generic.go:334] "Generic (PLEG): container finished" podID="eb27f5c1-d0a0-4aa7-97ee-e271d63a722d" containerID="99fa90bfc3b624390710250bf81efaec1774c15ef3534b5b8299005dbccc4f26" exitCode=0 Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.634558 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wmdr2" event={"ID":"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d","Type":"ContainerDied","Data":"99fa90bfc3b624390710250bf81efaec1774c15ef3534b5b8299005dbccc4f26"} Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.645860 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerStarted","Data":"4b57c7405b00f7355eef8a50703387733824a3461ff80efb37b06788cf6190da"} Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.647356 4712 generic.go:334] "Generic (PLEG): container finished" podID="d3d1772f-7207-41bf-91bd-8d46663fecc6" containerID="e74f9b430434fb24908ea766dd96c0d1ac4614ccd088c89f01542fc4ed13e8f0" exitCode=0 Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.647492 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xjh9f" event={"ID":"d3d1772f-7207-41bf-91bd-8d46663fecc6","Type":"ContainerDied","Data":"e74f9b430434fb24908ea766dd96c0d1ac4614ccd088c89f01542fc4ed13e8f0"} Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.675598 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4029-account-create-update-2kvgn" event={"ID":"653c7ebd-bdbe-4ef2-910c-67ab033d8aad","Type":"ContainerDied","Data":"242851c1a8694e51fd08ab74c666374d23d21aeb558c54f66421a86aacf72e83"} Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.668137 4712 generic.go:334] "Generic (PLEG): container finished" podID="653c7ebd-bdbe-4ef2-910c-67ab033d8aad" containerID="242851c1a8694e51fd08ab74c666374d23d21aeb558c54f66421a86aacf72e83" exitCode=0 Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.691330 4712 generic.go:334] "Generic (PLEG): container finished" podID="e5b506bd-07dd-439e-add3-1bd487999c1a" containerID="e63b6d7a57ec4cfe71ab4b2e5eafea9f045af81c5b18f77ed541fbef1cb94cd3" exitCode=0 Jan 31 06:02:35 crc kubenswrapper[4712]: I0131 06:02:35.691399 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3898-account-create-update-xd6dr" event={"ID":"e5b506bd-07dd-439e-add3-1bd487999c1a","Type":"ContainerDied","Data":"e63b6d7a57ec4cfe71ab4b2e5eafea9f045af81c5b18f77ed541fbef1cb94cd3"} Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.130078 4712 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-8557fb7df9-bhzkt" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.592312 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.709569 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b83454be-e489-429c-a4b3-8914ee18daa4","Type":"ContainerStarted","Data":"16c09031e3fde4571bdcd5eef38d0b311b736452eb6afc0ccc4965b6eb7470db"} Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.720767 4712 generic.go:334] "Generic (PLEG): container finished" podID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerID="c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e" exitCode=137 Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.720883 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.720908 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ca81b096-cfb6-4ce9-a252-ceeb36cf9914","Type":"ContainerDied","Data":"c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e"} Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.720990 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ca81b096-cfb6-4ce9-a252-ceeb36cf9914","Type":"ContainerDied","Data":"94324a4d9442b9cd67fa074a81624496b968712fd194b96dfe48decd4877631f"} Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.721075 4712 scope.go:117] "RemoveContainer" containerID="c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.727790 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"baae8e5b-9153-449a-92f2-34eb6cb7dbd3","Type":"ContainerStarted","Data":"d99384220f68dc797b3bfb27f18b6800344bafae729a57dd4be85a6564a97231"} Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.741874 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bnksj\" (UniqueName: \"kubernetes.io/projected/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-kube-api-access-bnksj\") pod \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.741920 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-etc-machine-id\") pod \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.742007 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-combined-ca-bundle\") pod \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.742137 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data\") pod \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " Jan 31 06:02:36 crc 
kubenswrapper[4712]: I0131 06:02:36.742195 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-logs\") pod \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.742252 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data-custom\") pod \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.742271 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-scripts\") pod \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\" (UID: \"ca81b096-cfb6-4ce9-a252-ceeb36cf9914\") " Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.743450 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ca81b096-cfb6-4ce9-a252-ceeb36cf9914" (UID: "ca81b096-cfb6-4ce9-a252-ceeb36cf9914"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.743889 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-logs" (OuterVolumeSpecName: "logs") pod "ca81b096-cfb6-4ce9-a252-ceeb36cf9914" (UID: "ca81b096-cfb6-4ce9-a252-ceeb36cf9914"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.749291 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-scripts" (OuterVolumeSpecName: "scripts") pod "ca81b096-cfb6-4ce9-a252-ceeb36cf9914" (UID: "ca81b096-cfb6-4ce9-a252-ceeb36cf9914"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.751088 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ca81b096-cfb6-4ce9-a252-ceeb36cf9914" (UID: "ca81b096-cfb6-4ce9-a252-ceeb36cf9914"). InnerVolumeSpecName "config-data-custom". 
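
The reconciler_common.go lines mark the volume manager's reconciler noticing that cinder-api-0 (UID ca81b096…) has left the desired state: each still-mounted volume gets an "UnmountVolume started" entry, and the matching operation_generator.go lines report TearDown completing per plugin (host-path, empty-dir, secret, projected). A rough Go sketch of that desired-vs-actual loop, under the assumption of a simple map-based world model:

package main

import "fmt"

type volKey struct{ podUID, volName string }

// reconcile unmounts every actually-mounted volume that is no longer
// desired, mirroring the "operationExecutor.UnmountVolume started" /
// "UnmountVolume.TearDown succeeded" pairs above.
func reconcile(desired, actual map[volKey]bool, tearDown func(volKey) error) {
	for k := range actual {
		if !desired[k] {
			fmt.Printf("UnmountVolume started for %q (pod %s)\n", k.volName, k.podUID[:8])
			if err := tearDown(k); err == nil {
				delete(actual, k) // later reported as "Volume detached"
			}
		}
	}
}

func main() {
	pod := "ca81b096-cfb6-4ce9-a252-ceeb36cf9914"
	actual := map[volKey]bool{{pod, "logs"}: true, {pod, "scripts"}: true}
	desired := map[volKey]bool{} // pod deleted: nothing is desired anymore
	reconcile(desired, actual, func(k volKey) error {
		fmt.Printf("UnmountVolume.TearDown succeeded for %q\n", k.volName)
		return nil
	})
	fmt.Println("volumes still mounted:", len(actual))
}
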
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.755069 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerStarted","Data":"d3b54189e99563370fb91fdb9d2e813824bb4ebea3440d075acaeb9853f3766c"} Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.755114 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerStarted","Data":"5bc9a16fb937a6e002c81073ab4a373fd30583a51e52d79e27ee0e1e4ffb1973"} Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.759461 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-kube-api-access-bnksj" (OuterVolumeSpecName: "kube-api-access-bnksj") pod "ca81b096-cfb6-4ce9-a252-ceeb36cf9914" (UID: "ca81b096-cfb6-4ce9-a252-ceeb36cf9914"). InnerVolumeSpecName "kube-api-access-bnksj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.789297 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca81b096-cfb6-4ce9-a252-ceeb36cf9914" (UID: "ca81b096-cfb6-4ce9-a252-ceeb36cf9914"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.790267 4712 scope.go:117] "RemoveContainer" containerID="5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.835322 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data" (OuterVolumeSpecName: "config-data") pod "ca81b096-cfb6-4ce9-a252-ceeb36cf9914" (UID: "ca81b096-cfb6-4ce9-a252-ceeb36cf9914"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.844312 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bnksj\" (UniqueName: \"kubernetes.io/projected/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-kube-api-access-bnksj\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.844348 4712 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.844359 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.844368 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.844379 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-logs\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.844390 4712 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.844398 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca81b096-cfb6-4ce9-a252-ceeb36cf9914-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.854340 4712 scope.go:117] "RemoveContainer" containerID="c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e" Jan 31 06:02:36 crc kubenswrapper[4712]: E0131 06:02:36.857407 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e\": container with ID starting with c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e not found: ID does not exist" containerID="c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.857452 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e"} err="failed to get container status \"c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e\": rpc error: code = NotFound desc = could not find container \"c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e\": container with ID starting with c461c44a0f5edea05900165728fc419b20ef24c67ba4249c8f3c9bc17c82e04e not found: ID does not exist" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.857476 4712 scope.go:117] "RemoveContainer" containerID="5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f" Jan 31 06:02:36 crc kubenswrapper[4712]: E0131 06:02:36.860723 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f\": container with ID starting with 5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f not found: ID does not exist" containerID="5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f" Jan 31 06:02:36 crc kubenswrapper[4712]: I0131 06:02:36.860759 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f"} err="failed to get container status \"5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f\": rpc error: code = NotFound desc = could not find container \"5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f\": container with ID starting with 5ecd82f3235d748eb111fa411e23053ed2cfb86fd728e72472c9ba013c6de39f not found: ID does not exist" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.069774 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.099522 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.111844 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 31 06:02:37 crc kubenswrapper[4712]: E0131 06:02:37.112450 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerName="cinder-api" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.112466 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerName="cinder-api" Jan 31 06:02:37 crc kubenswrapper[4712]: E0131 06:02:37.112486 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerName="cinder-api-log" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.112492 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerName="cinder-api-log" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.112715 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerName="cinder-api-log" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.112729 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" containerName="cinder-api" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.114077 4712 util.go:30] "No sandbox for pod can be found. 
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.114077 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.117373 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.117462 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.117600 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.132594 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.262216 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.262657 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-config-data-custom\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.262686 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.262708 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-scripts\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.262746 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.262820 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkgqq\" (UniqueName: \"kubernetes.io/projected/f1761086-d010-4dcc-91fb-a9503805de81-kube-api-access-fkgqq\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.262861 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1761086-d010-4dcc-91fb-a9503805de81-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.262908 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-config-data\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.262957 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1761086-d010-4dcc-91fb-a9503805de81-logs\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.367975 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-config-data-custom\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.368042 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.368108 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-scripts\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.368378 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.369489 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkgqq\" (UniqueName: \"kubernetes.io/projected/f1761086-d010-4dcc-91fb-a9503805de81-kube-api-access-fkgqq\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.369590 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1761086-d010-4dcc-91fb-a9503805de81-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.369690 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-config-data\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.369787 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1761086-d010-4dcc-91fb-a9503805de81-logs\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.369815 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1761086-d010-4dcc-91fb-a9503805de81-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.369860 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.370325 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1761086-d010-4dcc-91fb-a9503805de81-logs\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.379995 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-config-data\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.381034 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-scripts\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.389527 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.389929 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.390626 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-config-data-custom\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.401749 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkgqq\" (UniqueName: \"kubernetes.io/projected/f1761086-d010-4dcc-91fb-a9503805de81-kube-api-access-fkgqq\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.402009 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1761086-d010-4dcc-91fb-a9503805de81-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f1761086-d010-4dcc-91fb-a9503805de81\") " pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.510210 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-hkgvt"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.564902 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.669693 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.677691 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db4b3a3-a250-4150-8ee3-15d770bd611b-operator-scripts\") pod \"9db4b3a3-a250-4150-8ee3-15d770bd611b\" (UID: \"9db4b3a3-a250-4150-8ee3-15d770bd611b\") "
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.678045 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-844dj\" (UniqueName: \"kubernetes.io/projected/9db4b3a3-a250-4150-8ee3-15d770bd611b-kube-api-access-844dj\") pod \"9db4b3a3-a250-4150-8ee3-15d770bd611b\" (UID: \"9db4b3a3-a250-4150-8ee3-15d770bd611b\") "
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.680132 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db4b3a3-a250-4150-8ee3-15d770bd611b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9db4b3a3-a250-4150-8ee3-15d770bd611b" (UID: "9db4b3a3-a250-4150-8ee3-15d770bd611b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.689581 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xjh9f"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.698287 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9db4b3a3-a250-4150-8ee3-15d770bd611b-kube-api-access-844dj" (OuterVolumeSpecName: "kube-api-access-844dj") pod "9db4b3a3-a250-4150-8ee3-15d770bd611b" (UID: "9db4b3a3-a250-4150-8ee3-15d770bd611b"). InnerVolumeSpecName "kube-api-access-844dj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.745798 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3898-account-create-update-xd6dr"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.771540 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wmdr2"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.775063 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4029-account-create-update-2kvgn"
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.781011 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7431f59c-b102-4ea8-b8d4-f1d7f373af85-operator-scripts\") pod \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\" (UID: \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\") "
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.781087 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfr7s\" (UniqueName: \"kubernetes.io/projected/7431f59c-b102-4ea8-b8d4-f1d7f373af85-kube-api-access-dfr7s\") pod \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\" (UID: \"7431f59c-b102-4ea8-b8d4-f1d7f373af85\") "
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.781149 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3d1772f-7207-41bf-91bd-8d46663fecc6-operator-scripts\") pod \"d3d1772f-7207-41bf-91bd-8d46663fecc6\" (UID: \"d3d1772f-7207-41bf-91bd-8d46663fecc6\") "
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.781316 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6r7s\" (UniqueName: \"kubernetes.io/projected/d3d1772f-7207-41bf-91bd-8d46663fecc6-kube-api-access-f6r7s\") pod \"d3d1772f-7207-41bf-91bd-8d46663fecc6\" (UID: \"d3d1772f-7207-41bf-91bd-8d46663fecc6\") "
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.781871 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db4b3a3-a250-4150-8ee3-15d770bd611b-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.781886 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-844dj\" (UniqueName: \"kubernetes.io/projected/9db4b3a3-a250-4150-8ee3-15d770bd611b-kube-api-access-844dj\") on node \"crc\" DevicePath \"\""
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.784361 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3d1772f-7207-41bf-91bd-8d46663fecc6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d3d1772f-7207-41bf-91bd-8d46663fecc6" (UID: "d3d1772f-7207-41bf-91bd-8d46663fecc6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.784551 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wmdr2" event={"ID":"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d","Type":"ContainerDied","Data":"dc0cf38254ed10529ff533b4e2f800fed7c2011568b38a1871746395bdc83411"}
Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.784581 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc0cf38254ed10529ff533b4e2f800fed7c2011568b38a1871746395bdc83411"
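
For anyone mining this log, the klog format is regular: severity+date (I0131), time, PID, file:line, a quoted message, then key="value" pairs. A rough stdlib Go parser sketch follows; the regular expressions are illustrative and deliberately simple (they will not handle the nested quotes inside event={...} payloads):

package main

import (
	"fmt"
	"regexp"
)

// header matches e.g.: I0131 06:02:36.130078 4712 kubelet.go:2542] "SyncLoop (probe)" ...
var header = regexp.MustCompile(`^([IWE])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+) (\d+) ([\w./]+:\d+)\] "([^"]*)"(.*)$`)
var kv = regexp.MustCompile(`(\w+)="([^"]*)"`)

func main() {
	line := `I0131 06:02:36.130078 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-8557fb7df9-bhzkt"`
	m := header.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Printf("severity=%s time=%s source=%s msg=%q\n", m[1], m[3], m[5], m[6])
	for _, pair := range kv.FindAllStringSubmatch(m[7], -1) {
		fmt.Printf("  %s = %s\n", pair[1], pair[2])
	}
}
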
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.784644 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wmdr2" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.785771 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7431f59c-b102-4ea8-b8d4-f1d7f373af85-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7431f59c-b102-4ea8-b8d4-f1d7f373af85" (UID: "7431f59c-b102-4ea8-b8d4-f1d7f373af85"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.788667 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-hkgvt" event={"ID":"9db4b3a3-a250-4150-8ee3-15d770bd611b","Type":"ContainerDied","Data":"c430c42e9d3875131d06ec097cdf563ef51e527ca12b6db7199ef749cd008775"} Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.788719 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c430c42e9d3875131d06ec097cdf563ef51e527ca12b6db7199ef749cd008775" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.788786 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-hkgvt" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.810214 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xjh9f" event={"ID":"d3d1772f-7207-41bf-91bd-8d46663fecc6","Type":"ContainerDied","Data":"809331eed1bf52de1f47fd2f3db2159b4ca42a076117464da38898d11be53dc3"} Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.810631 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="809331eed1bf52de1f47fd2f3db2159b4ca42a076117464da38898d11be53dc3" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.810795 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xjh9f" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.811794 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3d1772f-7207-41bf-91bd-8d46663fecc6-kube-api-access-f6r7s" (OuterVolumeSpecName: "kube-api-access-f6r7s") pod "d3d1772f-7207-41bf-91bd-8d46663fecc6" (UID: "d3d1772f-7207-41bf-91bd-8d46663fecc6"). InnerVolumeSpecName "kube-api-access-f6r7s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.836528 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b83454be-e489-429c-a4b3-8914ee18daa4","Type":"ContainerStarted","Data":"9cae3d5af9fd7f8f871835700ab1399d2465621948b1952de0046fac90fe2e49"} Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.888696 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-operator-scripts\") pod \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\" (UID: \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\") " Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.888756 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5b506bd-07dd-439e-add3-1bd487999c1a-operator-scripts\") pod \"e5b506bd-07dd-439e-add3-1bd487999c1a\" (UID: \"e5b506bd-07dd-439e-add3-1bd487999c1a\") " Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.888852 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-operator-scripts\") pod \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\" (UID: \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\") " Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.888900 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfbh6\" (UniqueName: \"kubernetes.io/projected/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-kube-api-access-mfbh6\") pod \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\" (UID: \"eb27f5c1-d0a0-4aa7-97ee-e271d63a722d\") " Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.888997 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9nw8\" (UniqueName: \"kubernetes.io/projected/e5b506bd-07dd-439e-add3-1bd487999c1a-kube-api-access-d9nw8\") pod \"e5b506bd-07dd-439e-add3-1bd487999c1a\" (UID: \"e5b506bd-07dd-439e-add3-1bd487999c1a\") " Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.889251 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcfqt\" (UniqueName: \"kubernetes.io/projected/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-kube-api-access-lcfqt\") pod \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\" (UID: \"653c7ebd-bdbe-4ef2-910c-67ab033d8aad\") " Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.892535 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "653c7ebd-bdbe-4ef2-910c-67ab033d8aad" (UID: "653c7ebd-bdbe-4ef2-910c-67ab033d8aad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.892979 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eb27f5c1-d0a0-4aa7-97ee-e271d63a722d" (UID: "eb27f5c1-d0a0-4aa7-97ee-e271d63a722d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.893741 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7431f59c-b102-4ea8-b8d4-f1d7f373af85-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.894562 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfr7s\" (UniqueName: \"kubernetes.io/projected/7431f59c-b102-4ea8-b8d4-f1d7f373af85-kube-api-access-dfr7s\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.894749 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3d1772f-7207-41bf-91bd-8d46663fecc6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.894815 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.894874 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6r7s\" (UniqueName: \"kubernetes.io/projected/d3d1772f-7207-41bf-91bd-8d46663fecc6-kube-api-access-f6r7s\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.894935 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.894220 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5b506bd-07dd-439e-add3-1bd487999c1a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e5b506bd-07dd-439e-add3-1bd487999c1a" (UID: "e5b506bd-07dd-439e-add3-1bd487999c1a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.897134 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5b506bd-07dd-439e-add3-1bd487999c1a-kube-api-access-d9nw8" (OuterVolumeSpecName: "kube-api-access-d9nw8") pod "e5b506bd-07dd-439e-add3-1bd487999c1a" (UID: "e5b506bd-07dd-439e-add3-1bd487999c1a"). InnerVolumeSpecName "kube-api-access-d9nw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.897915 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4029-account-create-update-2kvgn" event={"ID":"653c7ebd-bdbe-4ef2-910c-67ab033d8aad","Type":"ContainerDied","Data":"a5df6fd2b2957b666ff9ecbbe4465606248add61bb0a627b347e272b3d9f6917"} Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.898145 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5df6fd2b2957b666ff9ecbbe4465606248add61bb0a627b347e272b3d9f6917" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.898295 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-4029-account-create-update-2kvgn" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.898813 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-kube-api-access-lcfqt" (OuterVolumeSpecName: "kube-api-access-lcfqt") pod "653c7ebd-bdbe-4ef2-910c-67ab033d8aad" (UID: "653c7ebd-bdbe-4ef2-910c-67ab033d8aad"). InnerVolumeSpecName "kube-api-access-lcfqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.905349 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3898-account-create-update-xd6dr" event={"ID":"e5b506bd-07dd-439e-add3-1bd487999c1a","Type":"ContainerDied","Data":"6ed3f897400f0374cb093c78786bbd11eb209ac999805e46b7ce994b642b9da5"} Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.905661 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ed3f897400f0374cb093c78786bbd11eb209ac999805e46b7ce994b642b9da5" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.905639 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3898-account-create-update-xd6dr" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.909642 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" event={"ID":"7431f59c-b102-4ea8-b8d4-f1d7f373af85","Type":"ContainerDied","Data":"fc3264644a72bde4bf5964dac9bada3932c6a1f603a85cc0c9d6c690de2bb9e7"} Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.909917 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc3264644a72bde4bf5964dac9bada3932c6a1f603a85cc0c9d6c690de2bb9e7" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.909897 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6ea2-account-create-update-sk5tw" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.919363 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"baae8e5b-9153-449a-92f2-34eb6cb7dbd3","Type":"ContainerStarted","Data":"a029896cae2547ed7302e4ae9997b40aa7340b970bd66aeabb0b4aa903f58015"} Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.926504 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-kube-api-access-mfbh6" (OuterVolumeSpecName: "kube-api-access-mfbh6") pod "eb27f5c1-d0a0-4aa7-97ee-e271d63a722d" (UID: "eb27f5c1-d0a0-4aa7-97ee-e271d63a722d"). InnerVolumeSpecName "kube-api-access-mfbh6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.935231 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerStarted","Data":"17211288168c22f4fb983c0f019621aca90a1887a7fffc90aa2cde6e83f28ac7"} Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.993914 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.993891624 podStartE2EDuration="4.993891624s" podCreationTimestamp="2026-01-31 06:02:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:37.896956672 +0000 UTC m=+1423.990838513" watchObservedRunningTime="2026-01-31 06:02:37.993891624 +0000 UTC m=+1424.087773465" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.999677 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcfqt\" (UniqueName: \"kubernetes.io/projected/653c7ebd-bdbe-4ef2-910c-67ab033d8aad-kube-api-access-lcfqt\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.999797 4712 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5b506bd-07dd-439e-add3-1bd487999c1a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.999872 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfbh6\" (UniqueName: \"kubernetes.io/projected/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d-kube-api-access-mfbh6\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:37 crc kubenswrapper[4712]: I0131 06:02:37.999928 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9nw8\" (UniqueName: \"kubernetes.io/projected/e5b506bd-07dd-439e-add3-1bd487999c1a-kube-api-access-d9nw8\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:38 crc kubenswrapper[4712]: I0131 06:02:38.009020 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.008998599 podStartE2EDuration="5.008998599s" podCreationTimestamp="2026-01-31 06:02:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:37.9505171 +0000 UTC m=+1424.044398941" watchObservedRunningTime="2026-01-31 06:02:38.008998599 +0000 UTC m=+1424.102880440" Jan 31 06:02:38 crc kubenswrapper[4712]: I0131 06:02:38.319013 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 31 06:02:38 crc kubenswrapper[4712]: I0131 06:02:38.515316 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca81b096-cfb6-4ce9-a252-ceeb36cf9914" path="/var/lib/kubelet/pods/ca81b096-cfb6-4ce9-a252-ceeb36cf9914/volumes" Jan 31 06:02:38 crc kubenswrapper[4712]: I0131 06:02:38.951570 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1761086-d010-4dcc-91fb-a9503805de81","Type":"ContainerStarted","Data":"a217479846890cecbf1c94bb5804a55ea105e2aaa0976ea8dd91f1a679e897e0"} Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.788813 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l9r99"] Jan 31 06:02:39 crc kubenswrapper[4712]: E0131 06:02:39.789873 4712 
Jan 31 06:02:39 crc kubenswrapper[4712]: E0131 06:02:39.789873 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3d1772f-7207-41bf-91bd-8d46663fecc6" containerName="mariadb-database-create"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.789894 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3d1772f-7207-41bf-91bd-8d46663fecc6" containerName="mariadb-database-create"
Jan 31 06:02:39 crc kubenswrapper[4712]: E0131 06:02:39.789927 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653c7ebd-bdbe-4ef2-910c-67ab033d8aad" containerName="mariadb-account-create-update"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.789935 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="653c7ebd-bdbe-4ef2-910c-67ab033d8aad" containerName="mariadb-account-create-update"
Jan 31 06:02:39 crc kubenswrapper[4712]: E0131 06:02:39.789952 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7431f59c-b102-4ea8-b8d4-f1d7f373af85" containerName="mariadb-account-create-update"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.789958 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="7431f59c-b102-4ea8-b8d4-f1d7f373af85" containerName="mariadb-account-create-update"
Jan 31 06:02:39 crc kubenswrapper[4712]: E0131 06:02:39.789973 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db4b3a3-a250-4150-8ee3-15d770bd611b" containerName="mariadb-database-create"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.789979 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db4b3a3-a250-4150-8ee3-15d770bd611b" containerName="mariadb-database-create"
Jan 31 06:02:39 crc kubenswrapper[4712]: E0131 06:02:39.790015 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb27f5c1-d0a0-4aa7-97ee-e271d63a722d" containerName="mariadb-database-create"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.790023 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb27f5c1-d0a0-4aa7-97ee-e271d63a722d" containerName="mariadb-database-create"
Jan 31 06:02:39 crc kubenswrapper[4712]: E0131 06:02:39.790041 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b506bd-07dd-439e-add3-1bd487999c1a" containerName="mariadb-account-create-update"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.790046 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b506bd-07dd-439e-add3-1bd487999c1a" containerName="mariadb-account-create-update"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.790346 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="653c7ebd-bdbe-4ef2-910c-67ab033d8aad" containerName="mariadb-account-create-update"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.790373 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="7431f59c-b102-4ea8-b8d4-f1d7f373af85" containerName="mariadb-account-create-update"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.790383 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5b506bd-07dd-439e-add3-1bd487999c1a" containerName="mariadb-account-create-update"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.790436 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb27f5c1-d0a0-4aa7-97ee-e271d63a722d" containerName="mariadb-database-create"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.790451 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3d1772f-7207-41bf-91bd-8d46663fecc6" containerName="mariadb-database-create"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.790463 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="9db4b3a3-a250-4150-8ee3-15d770bd611b" containerName="mariadb-database-create"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.791444 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.793082 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.794406 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.794594 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-52vzn"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.803606 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l9r99"]
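
Before the conductor-db-sync pod can be started, the kubelet spins up reflectors for the secrets the pod references and records "Caches populated for *v1.Secret ..." once each is available locally; only then can the secret-backed volumes below be set up. A hedged, stdlib-only Go sketch of gating work on a set of caches (the fetch function stands in for the real list/watch machinery):

package main

import (
	"fmt"
	"sync"
	"time"
)

// waitForCaches fetches each referenced object once and returns only when
// everything a pod needs is locally cached, loosely mirroring the
// reflector lines above.
func waitForCaches(sources []string, fetch func(string)) {
	var wg sync.WaitGroup
	for _, s := range sources {
		wg.Add(1)
		go func(src string) {
			defer wg.Done()
			fetch(src)
			fmt.Println("Caches populated for", src)
		}(s)
	}
	wg.Wait()
}

func main() {
	secrets := []string{
		"openstack/nova-cell0-conductor-config-data",
		"openstack/nova-cell0-conductor-scripts",
		"openstack/nova-nova-dockercfg-52vzn",
	}
	waitForCaches(secrets, func(string) { time.Sleep(10 * time.Millisecond) })
	fmt.Println("all secret caches ready; volume setup can proceed")
}
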
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.849154 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfzpm\" (UniqueName: \"kubernetes.io/projected/aedf7767-82ba-4f3b-a34e-456f148922a4-kube-api-access-tfzpm\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.849839 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-scripts\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.849892 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-config-data\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.849919 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.952054 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfzpm\" (UniqueName: \"kubernetes.io/projected/aedf7767-82ba-4f3b-a34e-456f148922a4-kube-api-access-tfzpm\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.952123 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-scripts\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.952159 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-config-data\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.952193 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.960082 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-scripts\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.960142 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-config-data\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.960751 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.985675 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfzpm\" (UniqueName: \"kubernetes.io/projected/aedf7767-82ba-4f3b-a34e-456f148922a4-kube-api-access-tfzpm\") pod \"nova-cell0-conductor-db-sync-l9r99\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.986230 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1761086-d010-4dcc-91fb-a9503805de81","Type":"ContainerStarted","Data":"19181f82c0fb2d71b1630c8bbb04ba41c73e94856eea4e2537467f1bab01b2d0"}
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.986281 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1761086-d010-4dcc-91fb-a9503805de81","Type":"ContainerStarted","Data":"30157439eceb662411d4d9b15415f2348cdbc000883a1fb4b732e6adb6192983"}
Jan 31 06:02:39 crc kubenswrapper[4712]: I0131 06:02:39.989261 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
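
The "SyncLoop (probe)" entries report only transitions of per-probe workers: status="" means no result has crossed a threshold yet (cinder-api-0 shows "" here and flips to "ready" at 06:02:50 further down), while startup probes report "unhealthy"/"started". A compact Go sketch of such a worker, assuming a success threshold of 3 for illustration:

package main

import "fmt"

// probeWorker reports a readiness transition only after enough
// consecutive successes, like the status changes logged above.
type probeWorker struct {
	status    string // "" until the first threshold is crossed
	successes int
	threshold int
}

func (w *probeWorker) observe(ok bool) {
	if !ok {
		w.successes = 0
		return
	}
	if w.successes++; w.successes >= w.threshold && w.status != "ready" {
		w.status = "ready"
		fmt.Printf("SyncLoop (probe) probe=\"readiness\" status=%q\n", w.status)
	}
}

func main() {
	w := &probeWorker{threshold: 3}
	fmt.Printf("initial status=%q\n", w.status) // "" - no result yet
	for _, ok := range []bool{false, true, true, true} {
		w.observe(ok)
	}
}
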
Jan 31 06:02:40 crc kubenswrapper[4712]: I0131 06:02:40.016748 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.016722835 podStartE2EDuration="3.016722835s" podCreationTimestamp="2026-01-31 06:02:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:02:40.014765497 +0000 UTC m=+1426.108647338" watchObservedRunningTime="2026-01-31 06:02:40.016722835 +0000 UTC m=+1426.110604676"
Jan 31 06:02:40 crc kubenswrapper[4712]: I0131 06:02:40.270776 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l9r99"
Jan 31 06:02:40 crc kubenswrapper[4712]: I0131 06:02:40.797378 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l9r99"]
Jan 31 06:02:40 crc kubenswrapper[4712]: W0131 06:02:40.800659 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaedf7767_82ba_4f3b_a34e_456f148922a4.slice/crio-5de020a295603b1b10dcb8502cb70540067dd243afafa8542a4d3d16009b24ab WatchSource:0}: Error finding container 5de020a295603b1b10dcb8502cb70540067dd243afafa8542a4d3d16009b24ab: Status 404 returned error can't find the container with id 5de020a295603b1b10dcb8502cb70540067dd243afafa8542a4d3d16009b24ab
Jan 31 06:02:40 crc kubenswrapper[4712]: I0131 06:02:40.999521 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerStarted","Data":"f219ce9d178854de47debaba82f50c10375eeaeb12058d877cf6da56faf7c747"}
Jan 31 06:02:41 crc kubenswrapper[4712]: I0131 06:02:41.000895 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 31 06:02:41 crc kubenswrapper[4712]: I0131 06:02:41.006024 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l9r99" event={"ID":"aedf7767-82ba-4f3b-a34e-456f148922a4","Type":"ContainerStarted","Data":"5de020a295603b1b10dcb8502cb70540067dd243afafa8542a4d3d16009b24ab"}
Jan 31 06:02:41 crc kubenswrapper[4712]: I0131 06:02:41.028300 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.258550113 podStartE2EDuration="8.028277955s" podCreationTimestamp="2026-01-31 06:02:33 +0000 UTC" firstStartedPulling="2026-01-31 06:02:34.96833055 +0000 UTC m=+1421.062212391" lastFinishedPulling="2026-01-31 06:02:39.738058392 +0000 UTC m=+1425.831940233" observedRunningTime="2026-01-31 06:02:41.027585687 +0000 UTC m=+1427.121467548" watchObservedRunningTime="2026-01-31 06:02:41.028277955 +0000 UTC m=+1427.122159786"
Jan 31 06:02:41 crc kubenswrapper[4712]: I0131 06:02:41.116282 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-8557fb7df9-bhzkt"
Jan 31 06:02:43 crc kubenswrapper[4712]: I0131 06:02:43.766673 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-77dc4b7484-plhfq"
Jan 31 06:02:44 crc kubenswrapper[4712]: I0131 06:02:44.042626 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"5403021d-e6d4-4e4e-aa8e-8879f65f9f36","Type":"ContainerStarted","Data":"9fb3a05855069b0b9db589232e4d9e57434c1909b6da3cdd53ebbc046670c410"}
Jan 31 06:02:44 crc kubenswrapper[4712]: I0131 06:02:44.050690 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Jan 31 06:02:44 crc kubenswrapper[4712]: I0131 06:02:44.050747 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
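
The W-level "Failed to process watch event ... Status 404" above is a benign race: the crio-5de020a2… cgroup for the new sandbox appears before the runtime can answer a status query for it, so the lookup 404s and the event is dropped; a later relist resynchronizes state (the same container is reported ContainerStarted moments later). A Go sketch of dropping, rather than failing on, such racy lookups (errGone and the lookup closure are illustrative):

package main

import (
	"errors"
	"fmt"
)

var errGone = errors.New("status 404: can't find the container")

// handleEvent resolves the container named by a cgroup watch event and
// logs-and-drops events that lose the race with the runtime's own view.
func handleEvent(id string, lookup func(string) error) {
	if err := lookup(id); err != nil {
		fmt.Printf("W: failed to process watch event for %s: %v\n", id[:12], err)
		return // dropped, not fatal
	}
	fmt.Println("event processed for", id[:12])
}

func main() {
	known := map[string]bool{} // runtime has not registered the container yet
	lookup := func(id string) error {
		if !known[id] {
			return errGone
		}
		return nil
	}
	handleEvent("5de020a295603b1b10dcb8502cb70540067dd243afafa8542a4d3d16009b24ab", lookup)
}
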
startup duration" pod="openstack/openstackclient" podStartSLOduration=2.648083493 podStartE2EDuration="34.075303539s" podCreationTimestamp="2026-01-31 06:02:10 +0000 UTC" firstStartedPulling="2026-01-31 06:02:11.476832588 +0000 UTC m=+1397.570714429" lastFinishedPulling="2026-01-31 06:02:42.904052624 +0000 UTC m=+1428.997934475" observedRunningTime="2026-01-31 06:02:44.067908476 +0000 UTC m=+1430.161790317" watchObservedRunningTime="2026-01-31 06:02:44.075303539 +0000 UTC m=+1430.169185380" Jan 31 06:02:44 crc kubenswrapper[4712]: I0131 06:02:44.098431 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:44 crc kubenswrapper[4712]: I0131 06:02:44.116728 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:44 crc kubenswrapper[4712]: I0131 06:02:44.387772 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 31 06:02:44 crc kubenswrapper[4712]: I0131 06:02:44.387824 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 31 06:02:44 crc kubenswrapper[4712]: I0131 06:02:44.439236 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 31 06:02:44 crc kubenswrapper[4712]: I0131 06:02:44.443119 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 31 06:02:45 crc kubenswrapper[4712]: I0131 06:02:45.053031 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:45 crc kubenswrapper[4712]: I0131 06:02:45.053407 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 31 06:02:45 crc kubenswrapper[4712]: I0131 06:02:45.053420 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 31 06:02:45 crc kubenswrapper[4712]: I0131 06:02:45.053430 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:46 crc kubenswrapper[4712]: I0131 06:02:46.566632 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6445df85d9-7dknt" Jan 31 06:02:46 crc kubenswrapper[4712]: I0131 06:02:46.650803 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-77dc4b7484-plhfq"] Jan 31 06:02:46 crc kubenswrapper[4712]: I0131 06:02:46.651040 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-77dc4b7484-plhfq" podUID="ce5eb4c6-42a2-463c-92d3-baca84929eed" containerName="neutron-api" containerID="cri-o://9c3af8b24418c9521385fbe3496ea55dbfc0135a3325bb5b0e3a8027d8e6606f" gracePeriod=30 Jan 31 06:02:46 crc kubenswrapper[4712]: I0131 06:02:46.651330 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-77dc4b7484-plhfq" podUID="ce5eb4c6-42a2-463c-92d3-baca84929eed" containerName="neutron-httpd" containerID="cri-o://4a1e8a9ff09ef41761f615f9444ad1d74ec714281513e738e056eb11c837f8f7" gracePeriod=30 Jan 31 06:02:47 crc kubenswrapper[4712]: I0131 06:02:47.074301 4712 generic.go:334] "Generic (PLEG): container finished" podID="ce5eb4c6-42a2-463c-92d3-baca84929eed" 
containerID="4a1e8a9ff09ef41761f615f9444ad1d74ec714281513e738e056eb11c837f8f7" exitCode=0 Jan 31 06:02:47 crc kubenswrapper[4712]: I0131 06:02:47.074374 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-77dc4b7484-plhfq" event={"ID":"ce5eb4c6-42a2-463c-92d3-baca84929eed","Type":"ContainerDied","Data":"4a1e8a9ff09ef41761f615f9444ad1d74ec714281513e738e056eb11c837f8f7"} Jan 31 06:02:47 crc kubenswrapper[4712]: I0131 06:02:47.312289 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:47 crc kubenswrapper[4712]: I0131 06:02:47.312439 4712 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 31 06:02:47 crc kubenswrapper[4712]: I0131 06:02:47.442872 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 31 06:02:47 crc kubenswrapper[4712]: I0131 06:02:47.620687 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 31 06:02:47 crc kubenswrapper[4712]: I0131 06:02:47.620822 4712 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 31 06:02:48 crc kubenswrapper[4712]: I0131 06:02:48.086112 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 31 06:02:50 crc kubenswrapper[4712]: I0131 06:02:50.143336 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 31 06:02:53 crc kubenswrapper[4712]: I0131 06:02:53.651062 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:53 crc kubenswrapper[4712]: I0131 06:02:53.651796 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="ceilometer-central-agent" containerID="cri-o://5bc9a16fb937a6e002c81073ab4a373fd30583a51e52d79e27ee0e1e4ffb1973" gracePeriod=30 Jan 31 06:02:53 crc kubenswrapper[4712]: I0131 06:02:53.651855 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="proxy-httpd" containerID="cri-o://f219ce9d178854de47debaba82f50c10375eeaeb12058d877cf6da56faf7c747" gracePeriod=30 Jan 31 06:02:53 crc kubenswrapper[4712]: I0131 06:02:53.651933 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="sg-core" containerID="cri-o://17211288168c22f4fb983c0f019621aca90a1887a7fffc90aa2cde6e83f28ac7" gracePeriod=30 Jan 31 06:02:53 crc kubenswrapper[4712]: I0131 06:02:53.652304 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="ceilometer-notification-agent" containerID="cri-o://d3b54189e99563370fb91fdb9d2e813824bb4ebea3440d075acaeb9853f3766c" gracePeriod=30 Jan 31 06:02:53 crc kubenswrapper[4712]: I0131 06:02:53.664476 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.176:3000/\": EOF" Jan 31 06:02:54 crc kubenswrapper[4712]: I0131 06:02:54.161611 4712 generic.go:334] "Generic (PLEG): container finished" 
podID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerID="f219ce9d178854de47debaba82f50c10375eeaeb12058d877cf6da56faf7c747" exitCode=0 Jan 31 06:02:54 crc kubenswrapper[4712]: I0131 06:02:54.161647 4712 generic.go:334] "Generic (PLEG): container finished" podID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerID="17211288168c22f4fb983c0f019621aca90a1887a7fffc90aa2cde6e83f28ac7" exitCode=2 Jan 31 06:02:54 crc kubenswrapper[4712]: I0131 06:02:54.161667 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerDied","Data":"f219ce9d178854de47debaba82f50c10375eeaeb12058d877cf6da56faf7c747"} Jan 31 06:02:54 crc kubenswrapper[4712]: I0131 06:02:54.161691 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerDied","Data":"17211288168c22f4fb983c0f019621aca90a1887a7fffc90aa2cde6e83f28ac7"} Jan 31 06:02:55 crc kubenswrapper[4712]: I0131 06:02:55.194532 4712 generic.go:334] "Generic (PLEG): container finished" podID="ce5eb4c6-42a2-463c-92d3-baca84929eed" containerID="9c3af8b24418c9521385fbe3496ea55dbfc0135a3325bb5b0e3a8027d8e6606f" exitCode=0 Jan 31 06:02:55 crc kubenswrapper[4712]: I0131 06:02:55.194618 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-77dc4b7484-plhfq" event={"ID":"ce5eb4c6-42a2-463c-92d3-baca84929eed","Type":"ContainerDied","Data":"9c3af8b24418c9521385fbe3496ea55dbfc0135a3325bb5b0e3a8027d8e6606f"} Jan 31 06:02:55 crc kubenswrapper[4712]: I0131 06:02:55.199160 4712 generic.go:334] "Generic (PLEG): container finished" podID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerID="5bc9a16fb937a6e002c81073ab4a373fd30583a51e52d79e27ee0e1e4ffb1973" exitCode=0 Jan 31 06:02:55 crc kubenswrapper[4712]: I0131 06:02:55.199190 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerDied","Data":"5bc9a16fb937a6e002c81073ab4a373fd30583a51e52d79e27ee0e1e4ffb1973"} Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.242253 4712 generic.go:334] "Generic (PLEG): container finished" podID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerID="d3b54189e99563370fb91fdb9d2e813824bb4ebea3440d075acaeb9853f3766c" exitCode=0 Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.242896 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerDied","Data":"d3b54189e99563370fb91fdb9d2e813824bb4ebea3440d075acaeb9853f3766c"} Jan 31 06:02:57 crc kubenswrapper[4712]: E0131 06:02:57.697992 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-nova-conductor:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 06:02:57 crc kubenswrapper[4712]: E0131 06:02:57.698343 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-nova-conductor:d790bc5e0de33b4fa3f6e15acfa448e0" Jan 31 06:02:57 crc kubenswrapper[4712]: E0131 06:02:57.698485 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:nova-cell0-conductor-db-sync,Image:quay.rdoproject.org/podified-master-centos9/openstack-nova-conductor:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CELL_NAME,Value:cell0,ValueFrom:nil,},EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:false,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/kolla/config_files/config.json,SubPath:nova-conductor-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tfzpm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42436,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-cell0-conductor-db-sync-l9r99_openstack(aedf7767-82ba-4f3b-a34e-456f148922a4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 31 06:02:57 crc kubenswrapper[4712]: E0131 06:02:57.699764 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/nova-cell0-conductor-db-sync-l9r99" podUID="aedf7767-82ba-4f3b-a34e-456f148922a4" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.759655 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.882514 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-config\") pod \"ce5eb4c6-42a2-463c-92d3-baca84929eed\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.882790 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncpcf\" (UniqueName: \"kubernetes.io/projected/ce5eb4c6-42a2-463c-92d3-baca84929eed-kube-api-access-ncpcf\") pod \"ce5eb4c6-42a2-463c-92d3-baca84929eed\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.882829 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-ovndb-tls-certs\") pod \"ce5eb4c6-42a2-463c-92d3-baca84929eed\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.883432 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-httpd-config\") pod \"ce5eb4c6-42a2-463c-92d3-baca84929eed\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.883511 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-combined-ca-bundle\") pod \"ce5eb4c6-42a2-463c-92d3-baca84929eed\" (UID: \"ce5eb4c6-42a2-463c-92d3-baca84929eed\") " Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.890659 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "ce5eb4c6-42a2-463c-92d3-baca84929eed" (UID: "ce5eb4c6-42a2-463c-92d3-baca84929eed"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.896451 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce5eb4c6-42a2-463c-92d3-baca84929eed-kube-api-access-ncpcf" (OuterVolumeSpecName: "kube-api-access-ncpcf") pod "ce5eb4c6-42a2-463c-92d3-baca84929eed" (UID: "ce5eb4c6-42a2-463c-92d3-baca84929eed"). InnerVolumeSpecName "kube-api-access-ncpcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.947601 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce5eb4c6-42a2-463c-92d3-baca84929eed" (UID: "ce5eb4c6-42a2-463c-92d3-baca84929eed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.956514 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.982299 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-config" (OuterVolumeSpecName: "config") pod "ce5eb4c6-42a2-463c-92d3-baca84929eed" (UID: "ce5eb4c6-42a2-463c-92d3-baca84929eed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.986033 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncpcf\" (UniqueName: \"kubernetes.io/projected/ce5eb4c6-42a2-463c-92d3-baca84929eed-kube-api-access-ncpcf\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.986059 4712 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.986069 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.986078 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:57 crc kubenswrapper[4712]: I0131 06:02:57.990454 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "ce5eb4c6-42a2-463c-92d3-baca84929eed" (UID: "ce5eb4c6-42a2-463c-92d3-baca84929eed"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.086829 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-scripts\") pod \"92f3c0de-8b85-4430-99d6-b7884720e31b\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.086891 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-sg-core-conf-yaml\") pod \"92f3c0de-8b85-4430-99d6-b7884720e31b\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.086968 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-combined-ca-bundle\") pod \"92f3c0de-8b85-4430-99d6-b7884720e31b\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.087036 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-config-data\") pod \"92f3c0de-8b85-4430-99d6-b7884720e31b\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.087062 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-log-httpd\") pod \"92f3c0de-8b85-4430-99d6-b7884720e31b\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.087095 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr6lp\" (UniqueName: \"kubernetes.io/projected/92f3c0de-8b85-4430-99d6-b7884720e31b-kube-api-access-vr6lp\") pod \"92f3c0de-8b85-4430-99d6-b7884720e31b\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.087129 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-run-httpd\") pod \"92f3c0de-8b85-4430-99d6-b7884720e31b\" (UID: \"92f3c0de-8b85-4430-99d6-b7884720e31b\") " Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.087577 4712 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce5eb4c6-42a2-463c-92d3-baca84929eed-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.087762 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "92f3c0de-8b85-4430-99d6-b7884720e31b" (UID: "92f3c0de-8b85-4430-99d6-b7884720e31b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.087911 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "92f3c0de-8b85-4430-99d6-b7884720e31b" (UID: "92f3c0de-8b85-4430-99d6-b7884720e31b"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.090504 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92f3c0de-8b85-4430-99d6-b7884720e31b-kube-api-access-vr6lp" (OuterVolumeSpecName: "kube-api-access-vr6lp") pod "92f3c0de-8b85-4430-99d6-b7884720e31b" (UID: "92f3c0de-8b85-4430-99d6-b7884720e31b"). InnerVolumeSpecName "kube-api-access-vr6lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.094284 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-scripts" (OuterVolumeSpecName: "scripts") pod "92f3c0de-8b85-4430-99d6-b7884720e31b" (UID: "92f3c0de-8b85-4430-99d6-b7884720e31b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.141690 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "92f3c0de-8b85-4430-99d6-b7884720e31b" (UID: "92f3c0de-8b85-4430-99d6-b7884720e31b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.173844 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92f3c0de-8b85-4430-99d6-b7884720e31b" (UID: "92f3c0de-8b85-4430-99d6-b7884720e31b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.183745 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-config-data" (OuterVolumeSpecName: "config-data") pod "92f3c0de-8b85-4430-99d6-b7884720e31b" (UID: "92f3c0de-8b85-4430-99d6-b7884720e31b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.190114 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.190152 4712 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.190166 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.190190 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f3c0de-8b85-4430-99d6-b7884720e31b-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.190199 4712 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.190207 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr6lp\" (UniqueName: \"kubernetes.io/projected/92f3c0de-8b85-4430-99d6-b7884720e31b-kube-api-access-vr6lp\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.190217 4712 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f3c0de-8b85-4430-99d6-b7884720e31b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.254825 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-77dc4b7484-plhfq" event={"ID":"ce5eb4c6-42a2-463c-92d3-baca84929eed","Type":"ContainerDied","Data":"2190e6c0f464b23ca4b2f9284b60ca56ca911d5012963cf0e6b83c5e19ed5d11"} Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.254888 4712 scope.go:117] "RemoveContainer" containerID="4a1e8a9ff09ef41761f615f9444ad1d74ec714281513e738e056eb11c837f8f7" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.255030 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-77dc4b7484-plhfq" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.270817 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92f3c0de-8b85-4430-99d6-b7884720e31b","Type":"ContainerDied","Data":"4b57c7405b00f7355eef8a50703387733824a3461ff80efb37b06788cf6190da"} Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.271022 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.298679 4712 scope.go:117] "RemoveContainer" containerID="9c3af8b24418c9521385fbe3496ea55dbfc0135a3325bb5b0e3a8027d8e6606f" Jan 31 06:02:58 crc kubenswrapper[4712]: E0131 06:02:58.298889 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-nova-conductor:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/nova-cell0-conductor-db-sync-l9r99" podUID="aedf7767-82ba-4f3b-a34e-456f148922a4" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.313349 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-77dc4b7484-plhfq"] Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.333959 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-77dc4b7484-plhfq"] Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.363051 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.369871 4712 scope.go:117] "RemoveContainer" containerID="f219ce9d178854de47debaba82f50c10375eeaeb12058d877cf6da56faf7c747" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.374143 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.391805 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:02:58 crc kubenswrapper[4712]: E0131 06:02:58.392326 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce5eb4c6-42a2-463c-92d3-baca84929eed" containerName="neutron-httpd" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392338 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce5eb4c6-42a2-463c-92d3-baca84929eed" containerName="neutron-httpd" Jan 31 06:02:58 crc kubenswrapper[4712]: E0131 06:02:58.392360 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce5eb4c6-42a2-463c-92d3-baca84929eed" containerName="neutron-api" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392367 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce5eb4c6-42a2-463c-92d3-baca84929eed" containerName="neutron-api" Jan 31 06:02:58 crc kubenswrapper[4712]: E0131 06:02:58.392385 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="proxy-httpd" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392391 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="proxy-httpd" Jan 31 06:02:58 crc kubenswrapper[4712]: E0131 06:02:58.392405 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="ceilometer-notification-agent" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392410 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="ceilometer-notification-agent" Jan 31 06:02:58 crc kubenswrapper[4712]: E0131 06:02:58.392421 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="ceilometer-central-agent" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392428 4712 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="ceilometer-central-agent" Jan 31 06:02:58 crc kubenswrapper[4712]: E0131 06:02:58.392448 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="sg-core" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392455 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="sg-core" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392642 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="ceilometer-notification-agent" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392656 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce5eb4c6-42a2-463c-92d3-baca84929eed" containerName="neutron-api" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392670 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="proxy-httpd" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392683 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="sg-core" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392696 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" containerName="ceilometer-central-agent" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.392707 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce5eb4c6-42a2-463c-92d3-baca84929eed" containerName="neutron-httpd" Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.394868 4712 util.go:30] "No sandbox for pod can be found. 
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.400760 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.401212 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.402192 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.413521 4712 scope.go:117] "RemoveContainer" containerID="17211288168c22f4fb983c0f019621aca90a1887a7fffc90aa2cde6e83f28ac7"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.483818 4712 scope.go:117] "RemoveContainer" containerID="d3b54189e99563370fb91fdb9d2e813824bb4ebea3440d075acaeb9853f3766c"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.502060 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv4wn\" (UniqueName: \"kubernetes.io/projected/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-kube-api-access-qv4wn\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.502101 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-run-httpd\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.502151 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-scripts\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.502188 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.502289 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.502410 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-log-httpd\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.502466 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-config-data\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.516696 4712 scope.go:117] "RemoveContainer" containerID="5bc9a16fb937a6e002c81073ab4a373fd30583a51e52d79e27ee0e1e4ffb1973"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.524376 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92f3c0de-8b85-4430-99d6-b7884720e31b" path="/var/lib/kubelet/pods/92f3c0de-8b85-4430-99d6-b7884720e31b/volumes"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.525128 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce5eb4c6-42a2-463c-92d3-baca84929eed" path="/var/lib/kubelet/pods/ce5eb4c6-42a2-463c-92d3-baca84929eed/volumes"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.604302 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-log-httpd\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.604369 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-config-data\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.604523 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv4wn\" (UniqueName: \"kubernetes.io/projected/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-kube-api-access-qv4wn\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.604545 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-run-httpd\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.604627 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-scripts\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.604675 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.604846 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.605533 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-run-httpd\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.605862 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-log-httpd\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.615134 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.633535 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.634452 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-config-data\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.635978 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-scripts\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.650266 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv4wn\" (UniqueName: \"kubernetes.io/projected/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-kube-api-access-qv4wn\") pod \"ceilometer-0\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") " pod="openstack/ceilometer-0"
Jan 31 06:02:58 crc kubenswrapper[4712]: I0131 06:02:58.715507 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 31 06:02:59 crc kubenswrapper[4712]: I0131 06:02:59.232680 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 31 06:02:59 crc kubenswrapper[4712]: W0131 06:02:59.242678 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fa5259a_2ba2_418d_b640_c1d9f1bfb3ea.slice/crio-c19ce8732bde95e86a187de8133adf4a5e5a4d9b8fd8d38369bcbce1cd0aff9c WatchSource:0}: Error finding container c19ce8732bde95e86a187de8133adf4a5e5a4d9b8fd8d38369bcbce1cd0aff9c: Status 404 returned error can't find the container with id c19ce8732bde95e86a187de8133adf4a5e5a4d9b8fd8d38369bcbce1cd0aff9c
Jan 31 06:02:59 crc kubenswrapper[4712]: I0131 06:02:59.282221 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerStarted","Data":"c19ce8732bde95e86a187de8133adf4a5e5a4d9b8fd8d38369bcbce1cd0aff9c"}
Jan 31 06:03:00 crc kubenswrapper[4712]: I0131 06:03:00.357010 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 31 06:03:02 crc kubenswrapper[4712]: I0131 06:03:02.308799 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerStarted","Data":"69b1005b3a043e98b12209e5ac427eee00f61936c522086f72b7bd1951bd6c67"}
Jan 31 06:03:03 crc kubenswrapper[4712]: I0131 06:03:03.336619 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerStarted","Data":"7fea23c7ba43d4211156e717b1e75d004a35db5e0a3c929763b946b670d2dcb5"}
Jan 31 06:03:04 crc kubenswrapper[4712]: I0131 06:03:04.348406 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerStarted","Data":"43368d5e2e58a6c85859042dd32c8a86ef4cda772bd6d7d96d72f39891d14925"}
Jan 31 06:03:06 crc kubenswrapper[4712]: I0131 06:03:06.381452 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerStarted","Data":"d4ddaf75f114a677dced024995bfe980fff027dcbfd6505c5510a54f107e92ba"}
Jan 31 06:03:06 crc kubenswrapper[4712]: I0131 06:03:06.382345 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="ceilometer-central-agent" containerID="cri-o://69b1005b3a043e98b12209e5ac427eee00f61936c522086f72b7bd1951bd6c67" gracePeriod=30
Jan 31 06:03:06 crc kubenswrapper[4712]: I0131 06:03:06.382454 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 31 06:03:06 crc kubenswrapper[4712]: I0131 06:03:06.382605 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="ceilometer-notification-agent" containerID="cri-o://7fea23c7ba43d4211156e717b1e75d004a35db5e0a3c929763b946b670d2dcb5" gracePeriod=30
Jan 31 06:03:06 crc kubenswrapper[4712]: I0131 06:03:06.382588 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="sg-core" containerID="cri-o://43368d5e2e58a6c85859042dd32c8a86ef4cda772bd6d7d96d72f39891d14925" gracePeriod=30
Jan 31 06:03:06 crc kubenswrapper[4712]: I0131 06:03:06.382727 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="proxy-httpd" containerID="cri-o://d4ddaf75f114a677dced024995bfe980fff027dcbfd6505c5510a54f107e92ba" gracePeriod=30
Jan 31 06:03:06 crc kubenswrapper[4712]: I0131 06:03:06.413667 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.7084836490000002 podStartE2EDuration="8.413644966s" podCreationTimestamp="2026-01-31 06:02:58 +0000 UTC" firstStartedPulling="2026-01-31 06:02:59.245854119 +0000 UTC m=+1445.339735960" lastFinishedPulling="2026-01-31 06:03:05.951015436 +0000 UTC m=+1452.044897277" observedRunningTime="2026-01-31 06:03:06.407125115 +0000 UTC m=+1452.501006976" watchObservedRunningTime="2026-01-31 06:03:06.413644966 +0000 UTC m=+1452.507526817"
Jan 31 06:03:07 crc kubenswrapper[4712]: I0131 06:03:07.406087 4712 generic.go:334] "Generic (PLEG): container finished" podID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerID="d4ddaf75f114a677dced024995bfe980fff027dcbfd6505c5510a54f107e92ba" exitCode=0
Jan 31 06:03:07 crc kubenswrapper[4712]: I0131 06:03:07.406456 4712 generic.go:334] "Generic (PLEG): container finished" podID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerID="43368d5e2e58a6c85859042dd32c8a86ef4cda772bd6d7d96d72f39891d14925" exitCode=2
Jan 31 06:03:07 crc kubenswrapper[4712]: I0131 06:03:07.406467 4712 generic.go:334] "Generic (PLEG): container finished" podID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerID="7fea23c7ba43d4211156e717b1e75d004a35db5e0a3c929763b946b670d2dcb5" exitCode=0
Jan 31 06:03:07 crc kubenswrapper[4712]: I0131 06:03:07.406203 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerDied","Data":"d4ddaf75f114a677dced024995bfe980fff027dcbfd6505c5510a54f107e92ba"}
Jan 31 06:03:07 crc kubenswrapper[4712]: I0131 06:03:07.406527 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerDied","Data":"43368d5e2e58a6c85859042dd32c8a86ef4cda772bd6d7d96d72f39891d14925"}
Jan 31 06:03:07 crc kubenswrapper[4712]: I0131 06:03:07.406542 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerDied","Data":"7fea23c7ba43d4211156e717b1e75d004a35db5e0a3c929763b946b670d2dcb5"}
Jan 31 06:03:10 crc kubenswrapper[4712]: I0131 06:03:10.436856 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l9r99" event={"ID":"aedf7767-82ba-4f3b-a34e-456f148922a4","Type":"ContainerStarted","Data":"ed9fddb4ead999a50964b5995e1584ab28d56e08fa42b77fef80229d6e3da572"}
Jan 31 06:03:10 crc kubenswrapper[4712]: I0131 06:03:10.466971 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-l9r99" podStartSLOduration=2.226574967 podStartE2EDuration="31.466952579s" podCreationTimestamp="2026-01-31 06:02:39 +0000 UTC" firstStartedPulling="2026-01-31 06:02:40.803442985 +0000 UTC m=+1426.897324846" lastFinishedPulling="2026-01-31 06:03:10.043820617 +0000 UTC m=+1456.137702458" observedRunningTime="2026-01-31 06:03:10.465701569 +0000 UTC m=+1456.559583410" watchObservedRunningTime="2026-01-31 06:03:10.466952579 +0000 UTC m=+1456.560834420"
Jan 31 06:03:12 crc kubenswrapper[4712]: I0131 06:03:12.496842 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 31 06:03:12 crc kubenswrapper[4712]: I0131 06:03:12.497290 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.506017 4712 generic.go:334] "Generic (PLEG): container finished" podID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerID="69b1005b3a043e98b12209e5ac427eee00f61936c522086f72b7bd1951bd6c67" exitCode=0
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.516495 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerDied","Data":"69b1005b3a043e98b12209e5ac427eee00f61936c522086f72b7bd1951bd6c67"}
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.742094 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.790576 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-config-data\") pod \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") "
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.790639 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv4wn\" (UniqueName: \"kubernetes.io/projected/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-kube-api-access-qv4wn\") pod \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") "
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.790668 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-log-httpd\") pod \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") "
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.790871 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-sg-core-conf-yaml\") pod \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") "
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.790995 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-run-httpd\") pod \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") "
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.791011 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-scripts\") pod \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") "
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.791049 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-combined-ca-bundle\") pod \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\" (UID: \"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea\") "
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.791685 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" (UID: "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.791817 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" (UID: "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.801522 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-scripts" (OuterVolumeSpecName: "scripts") pod "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" (UID: "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.801556 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-kube-api-access-qv4wn" (OuterVolumeSpecName: "kube-api-access-qv4wn") pod "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" (UID: "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea"). InnerVolumeSpecName "kube-api-access-qv4wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.832384 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" (UID: "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.890425 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" (UID: "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.893583 4712 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.893614 4712 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.893623 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-scripts\") on node \"crc\" DevicePath \"\""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.893631 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.893642 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv4wn\" (UniqueName: \"kubernetes.io/projected/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-kube-api-access-qv4wn\") on node \"crc\" DevicePath \"\""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.893653 4712 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.919358 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-config-data" (OuterVolumeSpecName: "config-data") pod "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" (UID: "0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:03:16 crc kubenswrapper[4712]: I0131 06:03:16.995844 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea-config-data\") on node \"crc\" DevicePath \"\""
Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.517152 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea","Type":"ContainerDied","Data":"c19ce8732bde95e86a187de8133adf4a5e5a4d9b8fd8d38369bcbce1cd0aff9c"}
Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.517226 4712 scope.go:117] "RemoveContainer" containerID="d4ddaf75f114a677dced024995bfe980fff027dcbfd6505c5510a54f107e92ba"
Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.517288 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.543726 4712 scope.go:117] "RemoveContainer" containerID="43368d5e2e58a6c85859042dd32c8a86ef4cda772bd6d7d96d72f39891d14925" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.578002 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.589930 4712 scope.go:117] "RemoveContainer" containerID="7fea23c7ba43d4211156e717b1e75d004a35db5e0a3c929763b946b670d2dcb5" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.597142 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.615535 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:17 crc kubenswrapper[4712]: E0131 06:03:17.616053 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="proxy-httpd" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.616083 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="proxy-httpd" Jan 31 06:03:17 crc kubenswrapper[4712]: E0131 06:03:17.616111 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="ceilometer-notification-agent" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.616119 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="ceilometer-notification-agent" Jan 31 06:03:17 crc kubenswrapper[4712]: E0131 06:03:17.616142 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="ceilometer-central-agent" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.616150 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="ceilometer-central-agent" Jan 31 06:03:17 crc kubenswrapper[4712]: E0131 06:03:17.616188 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="sg-core" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.616197 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="sg-core" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.616449 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="ceilometer-central-agent" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.616478 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="proxy-httpd" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.616491 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="sg-core" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.616505 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" containerName="ceilometer-notification-agent" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.619476 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.622617 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.622995 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.630308 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.661336 4712 scope.go:117] "RemoveContainer" containerID="69b1005b3a043e98b12209e5ac427eee00f61936c522086f72b7bd1951bd6c67" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.715592 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-log-httpd\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.715636 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.715672 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf9cm\" (UniqueName: \"kubernetes.io/projected/3be55f4c-4557-45cf-a2a0-c501c694337c-kube-api-access-qf9cm\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.715694 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-run-httpd\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.715748 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-scripts\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.715793 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-config-data\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.715828 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.817402 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-config-data\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.817797 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.817945 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-log-httpd\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.818038 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.818493 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf9cm\" (UniqueName: \"kubernetes.io/projected/3be55f4c-4557-45cf-a2a0-c501c694337c-kube-api-access-qf9cm\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.818528 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-log-httpd\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.819377 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-run-httpd\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.819585 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-scripts\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.819988 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-run-httpd\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.821468 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.823069 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.823414 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-scripts\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.834866 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf9cm\" (UniqueName: \"kubernetes.io/projected/3be55f4c-4557-45cf-a2a0-c501c694337c-kube-api-access-qf9cm\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.847519 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-config-data\") pod \"ceilometer-0\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " pod="openstack/ceilometer-0" Jan 31 06:03:17 crc kubenswrapper[4712]: I0131 06:03:17.959966 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:03:18 crc kubenswrapper[4712]: I0131 06:03:18.429267 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:18 crc kubenswrapper[4712]: I0131 06:03:18.515048 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea" path="/var/lib/kubelet/pods/0fa5259a-2ba2-418d-b640-c1d9f1bfb3ea/volumes" Jan 31 06:03:18 crc kubenswrapper[4712]: I0131 06:03:18.527146 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerStarted","Data":"4a88507babde9cd76b24a2d539fff44db78879c65a08d9d8545d8594328e2fa2"} Jan 31 06:03:19 crc kubenswrapper[4712]: I0131 06:03:19.536333 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerStarted","Data":"f9c7fdc8e5ad724764a5fc2ffa8673696f880b7ccfdac65b387f45250bc830b1"} Jan 31 06:03:19 crc kubenswrapper[4712]: I0131 06:03:19.537023 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerStarted","Data":"20c8081c2befdb7c03c4e18cc10fc712916d9b21d1ea2010d6d0cdf4159f0597"} Jan 31 06:03:20 crc kubenswrapper[4712]: I0131 06:03:20.557582 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerStarted","Data":"7f6ae87a1556a135cf3615aa7be67981e481aaacf53b71fd620fb1ae0cbd3a03"} Jan 31 06:03:22 crc kubenswrapper[4712]: I0131 06:03:22.578734 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerStarted","Data":"912022b7b06b9fcc2f0e48fd2f504515451372a0f04b926b1044f32500618e9a"} Jan 31 06:03:22 crc kubenswrapper[4712]: I0131 06:03:22.579443 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 31 06:03:22 crc kubenswrapper[4712]: I0131 06:03:22.604723 4712 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.083080957 podStartE2EDuration="5.604690167s" podCreationTimestamp="2026-01-31 06:03:17 +0000 UTC" firstStartedPulling="2026-01-31 06:03:18.439146134 +0000 UTC m=+1464.533027975" lastFinishedPulling="2026-01-31 06:03:21.960755344 +0000 UTC m=+1468.054637185" observedRunningTime="2026-01-31 06:03:22.59954525 +0000 UTC m=+1468.693427091" watchObservedRunningTime="2026-01-31 06:03:22.604690167 +0000 UTC m=+1468.698572008" Jan 31 06:03:24 crc kubenswrapper[4712]: I0131 06:03:24.988898 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:24 crc kubenswrapper[4712]: I0131 06:03:24.989743 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="ceilometer-central-agent" containerID="cri-o://20c8081c2befdb7c03c4e18cc10fc712916d9b21d1ea2010d6d0cdf4159f0597" gracePeriod=30 Jan 31 06:03:24 crc kubenswrapper[4712]: I0131 06:03:24.989833 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="sg-core" containerID="cri-o://7f6ae87a1556a135cf3615aa7be67981e481aaacf53b71fd620fb1ae0cbd3a03" gracePeriod=30 Jan 31 06:03:24 crc kubenswrapper[4712]: I0131 06:03:24.989850 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="proxy-httpd" containerID="cri-o://912022b7b06b9fcc2f0e48fd2f504515451372a0f04b926b1044f32500618e9a" gracePeriod=30 Jan 31 06:03:24 crc kubenswrapper[4712]: I0131 06:03:24.989894 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="ceilometer-notification-agent" containerID="cri-o://f9c7fdc8e5ad724764a5fc2ffa8673696f880b7ccfdac65b387f45250bc830b1" gracePeriod=30 Jan 31 06:03:25 crc kubenswrapper[4712]: I0131 06:03:25.612337 4712 generic.go:334] "Generic (PLEG): container finished" podID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerID="912022b7b06b9fcc2f0e48fd2f504515451372a0f04b926b1044f32500618e9a" exitCode=0 Jan 31 06:03:25 crc kubenswrapper[4712]: I0131 06:03:25.612671 4712 generic.go:334] "Generic (PLEG): container finished" podID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerID="7f6ae87a1556a135cf3615aa7be67981e481aaacf53b71fd620fb1ae0cbd3a03" exitCode=2 Jan 31 06:03:25 crc kubenswrapper[4712]: I0131 06:03:25.612682 4712 generic.go:334] "Generic (PLEG): container finished" podID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerID="f9c7fdc8e5ad724764a5fc2ffa8673696f880b7ccfdac65b387f45250bc830b1" exitCode=0 Jan 31 06:03:25 crc kubenswrapper[4712]: I0131 06:03:25.612452 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerDied","Data":"912022b7b06b9fcc2f0e48fd2f504515451372a0f04b926b1044f32500618e9a"} Jan 31 06:03:25 crc kubenswrapper[4712]: I0131 06:03:25.612721 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerDied","Data":"7f6ae87a1556a135cf3615aa7be67981e481aaacf53b71fd620fb1ae0cbd3a03"} Jan 31 06:03:25 crc kubenswrapper[4712]: I0131 06:03:25.612735 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerDied","Data":"f9c7fdc8e5ad724764a5fc2ffa8673696f880b7ccfdac65b387f45250bc830b1"} Jan 31 06:03:28 crc kubenswrapper[4712]: I0131 06:03:28.646000 4712 generic.go:334] "Generic (PLEG): container finished" podID="aedf7767-82ba-4f3b-a34e-456f148922a4" containerID="ed9fddb4ead999a50964b5995e1584ab28d56e08fa42b77fef80229d6e3da572" exitCode=0 Jan 31 06:03:28 crc kubenswrapper[4712]: I0131 06:03:28.646098 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l9r99" event={"ID":"aedf7767-82ba-4f3b-a34e-456f148922a4","Type":"ContainerDied","Data":"ed9fddb4ead999a50964b5995e1584ab28d56e08fa42b77fef80229d6e3da572"} Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.055631 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l9r99" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.180050 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-combined-ca-bundle\") pod \"aedf7767-82ba-4f3b-a34e-456f148922a4\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.180115 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfzpm\" (UniqueName: \"kubernetes.io/projected/aedf7767-82ba-4f3b-a34e-456f148922a4-kube-api-access-tfzpm\") pod \"aedf7767-82ba-4f3b-a34e-456f148922a4\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.180260 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-config-data\") pod \"aedf7767-82ba-4f3b-a34e-456f148922a4\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.180433 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-scripts\") pod \"aedf7767-82ba-4f3b-a34e-456f148922a4\" (UID: \"aedf7767-82ba-4f3b-a34e-456f148922a4\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.187314 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-scripts" (OuterVolumeSpecName: "scripts") pod "aedf7767-82ba-4f3b-a34e-456f148922a4" (UID: "aedf7767-82ba-4f3b-a34e-456f148922a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.187418 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aedf7767-82ba-4f3b-a34e-456f148922a4-kube-api-access-tfzpm" (OuterVolumeSpecName: "kube-api-access-tfzpm") pod "aedf7767-82ba-4f3b-a34e-456f148922a4" (UID: "aedf7767-82ba-4f3b-a34e-456f148922a4"). InnerVolumeSpecName "kube-api-access-tfzpm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.213426 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aedf7767-82ba-4f3b-a34e-456f148922a4" (UID: "aedf7767-82ba-4f3b-a34e-456f148922a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.232442 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-config-data" (OuterVolumeSpecName: "config-data") pod "aedf7767-82ba-4f3b-a34e-456f148922a4" (UID: "aedf7767-82ba-4f3b-a34e-456f148922a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.283606 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.283685 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfzpm\" (UniqueName: \"kubernetes.io/projected/aedf7767-82ba-4f3b-a34e-456f148922a4-kube-api-access-tfzpm\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.283718 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.283736 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aedf7767-82ba-4f3b-a34e-456f148922a4-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.670668 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l9r99" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.670852 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l9r99" event={"ID":"aedf7767-82ba-4f3b-a34e-456f148922a4","Type":"ContainerDied","Data":"5de020a295603b1b10dcb8502cb70540067dd243afafa8542a4d3d16009b24ab"} Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.671326 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5de020a295603b1b10dcb8502cb70540067dd243afafa8542a4d3d16009b24ab" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.673873 4712 generic.go:334] "Generic (PLEG): container finished" podID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerID="20c8081c2befdb7c03c4e18cc10fc712916d9b21d1ea2010d6d0cdf4159f0597" exitCode=0 Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.673910 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerDied","Data":"20c8081c2befdb7c03c4e18cc10fc712916d9b21d1ea2010d6d0cdf4159f0597"} Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.785698 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 31 06:03:30 crc kubenswrapper[4712]: E0131 06:03:30.786366 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aedf7767-82ba-4f3b-a34e-456f148922a4" containerName="nova-cell0-conductor-db-sync" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.786390 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="aedf7767-82ba-4f3b-a34e-456f148922a4" containerName="nova-cell0-conductor-db-sync" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.786681 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="aedf7767-82ba-4f3b-a34e-456f148922a4" containerName="nova-cell0-conductor-db-sync" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.787682 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.791813 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.792078 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-52vzn" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.817887 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.823970 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.899506 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-log-httpd\") pod \"3be55f4c-4557-45cf-a2a0-c501c694337c\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.900127 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3be55f4c-4557-45cf-a2a0-c501c694337c" (UID: "3be55f4c-4557-45cf-a2a0-c501c694337c"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.900246 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-scripts\") pod \"3be55f4c-4557-45cf-a2a0-c501c694337c\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.900352 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-combined-ca-bundle\") pod \"3be55f4c-4557-45cf-a2a0-c501c694337c\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.900399 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-config-data\") pod \"3be55f4c-4557-45cf-a2a0-c501c694337c\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.901053 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qf9cm\" (UniqueName: \"kubernetes.io/projected/3be55f4c-4557-45cf-a2a0-c501c694337c-kube-api-access-qf9cm\") pod \"3be55f4c-4557-45cf-a2a0-c501c694337c\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.901109 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-run-httpd\") pod \"3be55f4c-4557-45cf-a2a0-c501c694337c\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.901334 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-sg-core-conf-yaml\") pod \"3be55f4c-4557-45cf-a2a0-c501c694337c\" (UID: \"3be55f4c-4557-45cf-a2a0-c501c694337c\") " Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.901745 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c060068-c993-4028-8639-64e08eb08bd4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4c060068-c993-4028-8639-64e08eb08bd4\") " pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.901784 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g5tv\" (UniqueName: \"kubernetes.io/projected/4c060068-c993-4028-8639-64e08eb08bd4-kube-api-access-6g5tv\") pod \"nova-cell0-conductor-0\" (UID: \"4c060068-c993-4028-8639-64e08eb08bd4\") " pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.901865 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c060068-c993-4028-8639-64e08eb08bd4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4c060068-c993-4028-8639-64e08eb08bd4\") " pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.901858 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3be55f4c-4557-45cf-a2a0-c501c694337c" (UID: "3be55f4c-4557-45cf-a2a0-c501c694337c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.902531 4712 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.902586 4712 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3be55f4c-4557-45cf-a2a0-c501c694337c-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.906191 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3be55f4c-4557-45cf-a2a0-c501c694337c-kube-api-access-qf9cm" (OuterVolumeSpecName: "kube-api-access-qf9cm") pod "3be55f4c-4557-45cf-a2a0-c501c694337c" (UID: "3be55f4c-4557-45cf-a2a0-c501c694337c"). InnerVolumeSpecName "kube-api-access-qf9cm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.908343 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-scripts" (OuterVolumeSpecName: "scripts") pod "3be55f4c-4557-45cf-a2a0-c501c694337c" (UID: "3be55f4c-4557-45cf-a2a0-c501c694337c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.927713 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3be55f4c-4557-45cf-a2a0-c501c694337c" (UID: "3be55f4c-4557-45cf-a2a0-c501c694337c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.976298 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3be55f4c-4557-45cf-a2a0-c501c694337c" (UID: "3be55f4c-4557-45cf-a2a0-c501c694337c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:30 crc kubenswrapper[4712]: I0131 06:03:30.995605 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-config-data" (OuterVolumeSpecName: "config-data") pod "3be55f4c-4557-45cf-a2a0-c501c694337c" (UID: "3be55f4c-4557-45cf-a2a0-c501c694337c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.004464 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c060068-c993-4028-8639-64e08eb08bd4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4c060068-c993-4028-8639-64e08eb08bd4\") " pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.004525 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g5tv\" (UniqueName: \"kubernetes.io/projected/4c060068-c993-4028-8639-64e08eb08bd4-kube-api-access-6g5tv\") pod \"nova-cell0-conductor-0\" (UID: \"4c060068-c993-4028-8639-64e08eb08bd4\") " pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.004592 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c060068-c993-4028-8639-64e08eb08bd4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4c060068-c993-4028-8639-64e08eb08bd4\") " pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.004728 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qf9cm\" (UniqueName: \"kubernetes.io/projected/3be55f4c-4557-45cf-a2a0-c501c694337c-kube-api-access-qf9cm\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.004750 4712 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.004759 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.004768 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.004776 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3be55f4c-4557-45cf-a2a0-c501c694337c-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.009906 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c060068-c993-4028-8639-64e08eb08bd4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4c060068-c993-4028-8639-64e08eb08bd4\") " pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.010063 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c060068-c993-4028-8639-64e08eb08bd4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4c060068-c993-4028-8639-64e08eb08bd4\") " pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.030316 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g5tv\" (UniqueName: \"kubernetes.io/projected/4c060068-c993-4028-8639-64e08eb08bd4-kube-api-access-6g5tv\") pod 
\"nova-cell0-conductor-0\" (UID: \"4c060068-c993-4028-8639-64e08eb08bd4\") " pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.114182 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.617040 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.686731 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4c060068-c993-4028-8639-64e08eb08bd4","Type":"ContainerStarted","Data":"1b12406867b5ae974ee93a6721c5bdfe2821c9d5d3173670ecaa8e38a0e841f9"} Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.692068 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3be55f4c-4557-45cf-a2a0-c501c694337c","Type":"ContainerDied","Data":"4a88507babde9cd76b24a2d539fff44db78879c65a08d9d8545d8594328e2fa2"} Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.692141 4712 scope.go:117] "RemoveContainer" containerID="912022b7b06b9fcc2f0e48fd2f504515451372a0f04b926b1044f32500618e9a" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.692150 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.750472 4712 scope.go:117] "RemoveContainer" containerID="7f6ae87a1556a135cf3615aa7be67981e481aaacf53b71fd620fb1ae0cbd3a03" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.756142 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.777732 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.794132 4712 scope.go:117] "RemoveContainer" containerID="f9c7fdc8e5ad724764a5fc2ffa8673696f880b7ccfdac65b387f45250bc830b1" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.796384 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:31 crc kubenswrapper[4712]: E0131 06:03:31.796851 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="proxy-httpd" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.796868 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="proxy-httpd" Jan 31 06:03:31 crc kubenswrapper[4712]: E0131 06:03:31.796887 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="ceilometer-notification-agent" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.796894 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="ceilometer-notification-agent" Jan 31 06:03:31 crc kubenswrapper[4712]: E0131 06:03:31.796915 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="ceilometer-central-agent" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.796921 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="ceilometer-central-agent" Jan 31 06:03:31 crc kubenswrapper[4712]: E0131 06:03:31.796935 4712 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="sg-core" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.796941 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="sg-core" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.797118 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="ceilometer-central-agent" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.797134 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="sg-core" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.797153 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="ceilometer-notification-agent" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.797161 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" containerName="proxy-httpd" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.799126 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.802244 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.802444 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.810388 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.834802 4712 scope.go:117] "RemoveContainer" containerID="20c8081c2befdb7c03c4e18cc10fc712916d9b21d1ea2010d6d0cdf4159f0597" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.925723 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-scripts\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.926398 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-config-data\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.926530 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-run-httpd\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.926647 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.926764 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-log-httpd\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.926849 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:31 crc kubenswrapper[4712]: I0131 06:03:31.926936 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqc86\" (UniqueName: \"kubernetes.io/projected/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-kube-api-access-hqc86\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.029732 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-scripts\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.029782 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-config-data\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.029823 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-run-httpd\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.029879 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.029935 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-log-httpd\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.029955 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.029981 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqc86\" (UniqueName: \"kubernetes.io/projected/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-kube-api-access-hqc86\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.031129 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-log-httpd\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.031114 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-run-httpd\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.036056 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.036265 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.036862 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-scripts\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.052520 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqc86\" (UniqueName: \"kubernetes.io/projected/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-kube-api-access-hqc86\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.052558 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-config-data\") pod \"ceilometer-0\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.136886 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.519062 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3be55f4c-4557-45cf-a2a0-c501c694337c" path="/var/lib/kubelet/pods/3be55f4c-4557-45cf-a2a0-c501c694337c/volumes" Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.587656 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:03:32 crc kubenswrapper[4712]: W0131 06:03:32.590776 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0eaa8b0_a7b5_41af_ba8d_dc01413185d5.slice/crio-b7947cf307e84e416ea82289c4e9e9eb207e196b5f0895376def5bbabdaa53fc WatchSource:0}: Error finding container b7947cf307e84e416ea82289c4e9e9eb207e196b5f0895376def5bbabdaa53fc: Status 404 returned error can't find the container with id b7947cf307e84e416ea82289c4e9e9eb207e196b5f0895376def5bbabdaa53fc Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.702166 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerStarted","Data":"b7947cf307e84e416ea82289c4e9e9eb207e196b5f0895376def5bbabdaa53fc"} Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.705121 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4c060068-c993-4028-8639-64e08eb08bd4","Type":"ContainerStarted","Data":"7e497c825df163ad56526c55e4204408638a76cb596d6903242a44e6f57413d9"} Jan 31 06:03:32 crc kubenswrapper[4712]: I0131 06:03:32.706290 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:33 crc kubenswrapper[4712]: I0131 06:03:33.716502 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerStarted","Data":"ffe351fbbd6fc1b2d62280df4bfcf3b10438ce8bb90c044786c2c9e272ca1b2f"} Jan 31 06:03:33 crc kubenswrapper[4712]: I0131 06:03:33.717207 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerStarted","Data":"16529ce4a3397587f366d6846b2ffc01977be77a0b2a36dbf6e6dd6daf3425ea"} Jan 31 06:03:34 crc kubenswrapper[4712]: I0131 06:03:34.543283 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=4.543263751 podStartE2EDuration="4.543263751s" podCreationTimestamp="2026-01-31 06:03:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:03:32.727967251 +0000 UTC m=+1478.821849122" watchObservedRunningTime="2026-01-31 06:03:34.543263751 +0000 UTC m=+1480.637145592" Jan 31 06:03:34 crc kubenswrapper[4712]: I0131 06:03:34.729539 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerStarted","Data":"400243b0e49f775921d6d1f11e0554208eb7dd21ad91ff199f55c1dcef097cfa"} Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.141843 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.705675 4712 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-cell0-cell-mapping-jpj94"] Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.707327 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.715420 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.715770 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.732356 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-jpj94"] Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.802967 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerStarted","Data":"f50126ac4addac8cf370aa7bc075f9c6b40b23f4d00cffeef7348a08b93fead2"} Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.804495 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.839474 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.024588769 podStartE2EDuration="5.839458495s" podCreationTimestamp="2026-01-31 06:03:31 +0000 UTC" firstStartedPulling="2026-01-31 06:03:32.593404767 +0000 UTC m=+1478.687286598" lastFinishedPulling="2026-01-31 06:03:36.408274483 +0000 UTC m=+1482.502156324" observedRunningTime="2026-01-31 06:03:36.836773008 +0000 UTC m=+1482.930654849" watchObservedRunningTime="2026-01-31 06:03:36.839458495 +0000 UTC m=+1482.933340336" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.857577 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.857694 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-config-data\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.857797 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wchf9\" (UniqueName: \"kubernetes.io/projected/bb6bb4ae-9226-40ca-8117-3b62a4b91261-kube-api-access-wchf9\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.857963 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-scripts\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.936448 4712 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/nova-api-0"] Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.938085 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.940828 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.960455 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-scripts\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.960636 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.960723 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-config-data\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.960827 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wchf9\" (UniqueName: \"kubernetes.io/projected/bb6bb4ae-9226-40ca-8117-3b62a4b91261-kube-api-access-wchf9\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.965567 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.967922 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.972909 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.975091 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-scripts\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.975599 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.985951 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-config-data\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.992930 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wchf9\" (UniqueName: \"kubernetes.io/projected/bb6bb4ae-9226-40ca-8117-3b62a4b91261-kube-api-access-wchf9\") pod \"nova-cell0-cell-mapping-jpj94\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") " pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:36 crc kubenswrapper[4712]: I0131 06:03:36.994389 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.021641 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.049646 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jpj94" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.063724 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b079bdfb-7864-472c-af5f-05f2bde6998b-logs\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.063781 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnhjp\" (UniqueName: \"kubernetes.io/projected/b079bdfb-7864-472c-af5f-05f2bde6998b-kube-api-access-qnhjp\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.063832 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dv4t\" (UniqueName: \"kubernetes.io/projected/0b152827-c2b8-4a87-96ef-e4ec84e4265a-kube-api-access-6dv4t\") pod \"nova-scheduler-0\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.067529 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-config-data\") pod \"nova-scheduler-0\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.067601 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.067656 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.067687 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-config-data\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.123742 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.125756 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.130932 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.154127 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.155896 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.158511 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.173821 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b079bdfb-7864-472c-af5f-05f2bde6998b-logs\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.173865 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnhjp\" (UniqueName: \"kubernetes.io/projected/b079bdfb-7864-472c-af5f-05f2bde6998b-kube-api-access-qnhjp\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.173917 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dv4t\" (UniqueName: \"kubernetes.io/projected/0b152827-c2b8-4a87-96ef-e4ec84e4265a-kube-api-access-6dv4t\") pod \"nova-scheduler-0\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.173968 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-config-data\") pod \"nova-scheduler-0\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.173995 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.174022 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.174041 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-config-data\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.177388 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b079bdfb-7864-472c-af5f-05f2bde6998b-logs\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.186862 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-config-data\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.189313 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] 
Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.200143 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-config-data\") pod \"nova-scheduler-0\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.203161 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.203636 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.203725 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.220884 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnhjp\" (UniqueName: \"kubernetes.io/projected/b079bdfb-7864-472c-af5f-05f2bde6998b-kube-api-access-qnhjp\") pod \"nova-api-0\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.244811 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dv4t\" (UniqueName: \"kubernetes.io/projected/0b152827-c2b8-4a87-96ef-e4ec84e4265a-kube-api-access-6dv4t\") pod \"nova-scheduler-0\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.262858 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56bbbd958c-sm4v8"] Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.264563 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.270292 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.275082 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.275132 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlnzm\" (UniqueName: \"kubernetes.io/projected/b68b3b66-cd31-4701-965f-d1fb56854ee7-kube-api-access-tlnzm\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.275227 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt986\" (UniqueName: \"kubernetes.io/projected/57f643a8-d361-48e8-a982-df43b7a01b65-kube-api-access-zt986\") pod \"nova-cell1-novncproxy-0\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.275285 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-config-data\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.275317 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.275342 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.275402 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b68b3b66-cd31-4701-965f-d1fb56854ee7-logs\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.276486 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56bbbd958c-sm4v8"] Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.377755 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-sb\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.377984 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt986\" (UniqueName: 
\"kubernetes.io/projected/57f643a8-d361-48e8-a982-df43b7a01b65-kube-api-access-zt986\") pod \"nova-cell1-novncproxy-0\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.378019 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-config-data\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.378080 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-config\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.379541 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.379581 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-nb\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.379656 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.379676 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vtsw\" (UniqueName: \"kubernetes.io/projected/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-kube-api-access-7vtsw\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.379883 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b68b3b66-cd31-4701-965f-d1fb56854ee7-logs\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.379947 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-svc\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.380048 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-combined-ca-bundle\") pod 
\"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.380096 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlnzm\" (UniqueName: \"kubernetes.io/projected/b68b3b66-cd31-4701-965f-d1fb56854ee7-kube-api-access-tlnzm\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.380133 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-swift-storage-0\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.383959 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b68b3b66-cd31-4701-965f-d1fb56854ee7-logs\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.392567 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.397477 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.398768 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-config-data\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.402902 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlnzm\" (UniqueName: \"kubernetes.io/projected/b68b3b66-cd31-4701-965f-d1fb56854ee7-kube-api-access-tlnzm\") pod \"nova-metadata-0\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.404465 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.407793 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt986\" (UniqueName: \"kubernetes.io/projected/57f643a8-d361-48e8-a982-df43b7a01b65-kube-api-access-zt986\") pod \"nova-cell1-novncproxy-0\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.410745 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.458733 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.482112 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-config\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.482188 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-nb\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.482222 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vtsw\" (UniqueName: \"kubernetes.io/projected/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-kube-api-access-7vtsw\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.482276 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-svc\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.482346 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-swift-storage-0\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.482385 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-sb\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.483277 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-config\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.483466 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-nb\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.483934 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-svc\") pod 
\"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.484686 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-swift-storage-0\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.484906 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-sb\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.506111 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vtsw\" (UniqueName: \"kubernetes.io/projected/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-kube-api-access-7vtsw\") pod \"dnsmasq-dns-56bbbd958c-sm4v8\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.528424 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.613319 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.704159 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-jpj94"] Jan 31 06:03:37 crc kubenswrapper[4712]: W0131 06:03:37.739942 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb6bb4ae_9226_40ca_8117_3b62a4b91261.slice/crio-284d9650d9ab03bd36bddf3ab635d2e58caf4fa6404371e7533ad2d35cee2247 WatchSource:0}: Error finding container 284d9650d9ab03bd36bddf3ab635d2e58caf4fa6404371e7533ad2d35cee2247: Status 404 returned error can't find the container with id 284d9650d9ab03bd36bddf3ab635d2e58caf4fa6404371e7533ad2d35cee2247 Jan 31 06:03:37 crc kubenswrapper[4712]: I0131 06:03:37.829311 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jpj94" event={"ID":"bb6bb4ae-9226-40ca-8117-3b62a4b91261","Type":"ContainerStarted","Data":"284d9650d9ab03bd36bddf3ab635d2e58caf4fa6404371e7533ad2d35cee2247"} Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:37.930886 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.206009 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.254740 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.659203 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kzkg5"] Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.660440 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.663763 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.664580 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.671489 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kzkg5"] Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.818241 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-config-data\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.818646 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-scripts\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.818741 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfplt\" (UniqueName: \"kubernetes.io/projected/d484669e-304d-4389-b015-9479aadf2675-kube-api-access-tfplt\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.818772 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.849961 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b079bdfb-7864-472c-af5f-05f2bde6998b","Type":"ContainerStarted","Data":"15b4e89e636f9fd03c4b216badfe9492bb3087897e7dba73dbdc6c08c48ed44c"} Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.851980 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jpj94" event={"ID":"bb6bb4ae-9226-40ca-8117-3b62a4b91261","Type":"ContainerStarted","Data":"f63a07224856ba1ec27e2852b839aa1d3077fe9b49c33f31462652de2df3864b"} Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.853715 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"57f643a8-d361-48e8-a982-df43b7a01b65","Type":"ContainerStarted","Data":"9f8072fdda30904a6b0a960337823ebbd8801dd55d749aba0786d1a4a21354a2"} Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.872930 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-jpj94" podStartSLOduration=2.872908779 podStartE2EDuration="2.872908779s" podCreationTimestamp="2026-01-31 06:03:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:03:38.867488605 +0000 UTC m=+1484.961370446" watchObservedRunningTime="2026-01-31 06:03:38.872908779 +0000 UTC m=+1484.966790630" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.877161 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0b152827-c2b8-4a87-96ef-e4ec84e4265a","Type":"ContainerStarted","Data":"8c8470ebaae6163953c92822bb9a7decd1063d78d0ba76a6fe8ed59d8f71ae42"} Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.922593 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-config-data\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.922641 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-scripts\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.922696 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfplt\" (UniqueName: \"kubernetes.io/projected/d484669e-304d-4389-b015-9479aadf2675-kube-api-access-tfplt\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.922721 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.933377 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-scripts\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.935960 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-config-data\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.936769 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.940233 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.942305 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-tfplt\" (UniqueName: \"kubernetes.io/projected/d484669e-304d-4389-b015-9479aadf2675-kube-api-access-tfplt\") pod \"nova-cell1-conductor-db-sync-kzkg5\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") " pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.952934 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56bbbd958c-sm4v8"] Jan 31 06:03:38 crc kubenswrapper[4712]: W0131 06:03:38.957873 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7036754_54d9_4ca3_a12e_0ff31a7acb2a.slice/crio-76d154d01e4eab9553c167fc139a3541cdb9ec2349b8f10763f5019e5e5fd5c9 WatchSource:0}: Error finding container 76d154d01e4eab9553c167fc139a3541cdb9ec2349b8f10763f5019e5e5fd5c9: Status 404 returned error can't find the container with id 76d154d01e4eab9553c167fc139a3541cdb9ec2349b8f10763f5019e5e5fd5c9 Jan 31 06:03:38 crc kubenswrapper[4712]: I0131 06:03:38.991513 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:39 crc kubenswrapper[4712]: I0131 06:03:39.680241 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kzkg5"] Jan 31 06:03:39 crc kubenswrapper[4712]: W0131 06:03:39.696922 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd484669e_304d_4389_b015_9479aadf2675.slice/crio-9118d68e42217c9b7076037f3471341a63d5379a6042cb614dbc2c23fcb9ee14 WatchSource:0}: Error finding container 9118d68e42217c9b7076037f3471341a63d5379a6042cb614dbc2c23fcb9ee14: Status 404 returned error can't find the container with id 9118d68e42217c9b7076037f3471341a63d5379a6042cb614dbc2c23fcb9ee14 Jan 31 06:03:39 crc kubenswrapper[4712]: I0131 06:03:39.889628 4712 generic.go:334] "Generic (PLEG): container finished" podID="a7036754-54d9-4ca3-a12e-0ff31a7acb2a" containerID="288bde4d6ccc617d7ba335da3f2d56f8e537d7f2c002e15c58af19d3ffafc628" exitCode=0 Jan 31 06:03:39 crc kubenswrapper[4712]: I0131 06:03:39.889717 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" event={"ID":"a7036754-54d9-4ca3-a12e-0ff31a7acb2a","Type":"ContainerDied","Data":"288bde4d6ccc617d7ba335da3f2d56f8e537d7f2c002e15c58af19d3ffafc628"} Jan 31 06:03:39 crc kubenswrapper[4712]: I0131 06:03:39.889766 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" event={"ID":"a7036754-54d9-4ca3-a12e-0ff31a7acb2a","Type":"ContainerStarted","Data":"76d154d01e4eab9553c167fc139a3541cdb9ec2349b8f10763f5019e5e5fd5c9"} Jan 31 06:03:39 crc kubenswrapper[4712]: I0131 06:03:39.892895 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kzkg5" event={"ID":"d484669e-304d-4389-b015-9479aadf2675","Type":"ContainerStarted","Data":"9118d68e42217c9b7076037f3471341a63d5379a6042cb614dbc2c23fcb9ee14"} Jan 31 06:03:39 crc kubenswrapper[4712]: I0131 06:03:39.901945 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b68b3b66-cd31-4701-965f-d1fb56854ee7","Type":"ContainerStarted","Data":"dc83f1f1e8eafeb233d1f2058aa314e898dd5f1a028de3eaa57dc81eaea912cf"} Jan 31 06:03:40 crc kubenswrapper[4712]: I0131 06:03:40.619927 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 
31 06:03:40 crc kubenswrapper[4712]: I0131 06:03:40.657540 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:03:40 crc kubenswrapper[4712]: I0131 06:03:40.956194 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kzkg5" event={"ID":"d484669e-304d-4389-b015-9479aadf2675","Type":"ContainerStarted","Data":"3681a2c1b37e8bd0ff64b91199e6c2c709faee255dc78a48536bf8904eb1a194"} Jan 31 06:03:40 crc kubenswrapper[4712]: I0131 06:03:40.989312 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-kzkg5" podStartSLOduration=2.989288829 podStartE2EDuration="2.989288829s" podCreationTimestamp="2026-01-31 06:03:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:03:40.988070959 +0000 UTC m=+1487.081952800" watchObservedRunningTime="2026-01-31 06:03:40.989288829 +0000 UTC m=+1487.083170670" Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.497317 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.497780 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.982470 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b68b3b66-cd31-4701-965f-d1fb56854ee7","Type":"ContainerStarted","Data":"48f5d9c9abad68a23bac6c9b9067fb10f232a40ecdfa3f6d97f96521dd47425e"} Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.982878 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b68b3b66-cd31-4701-965f-d1fb56854ee7","Type":"ContainerStarted","Data":"8a587596cb89143712256533329f24a846fdb3366a076f683c69a04d7ec3ced5"} Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.982890 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerName="nova-metadata-metadata" containerID="cri-o://48f5d9c9abad68a23bac6c9b9067fb10f232a40ecdfa3f6d97f96521dd47425e" gracePeriod=30 Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.982792 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerName="nova-metadata-log" containerID="cri-o://8a587596cb89143712256533329f24a846fdb3366a076f683c69a04d7ec3ced5" gracePeriod=30 Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.987537 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" event={"ID":"a7036754-54d9-4ca3-a12e-0ff31a7acb2a","Type":"ContainerStarted","Data":"023b9d793e9cce5c9385c5522f90000495c22c9cb4f8a520db05b330bf39469a"} Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.988721 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.992990 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0b152827-c2b8-4a87-96ef-e4ec84e4265a","Type":"ContainerStarted","Data":"e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f"} Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.999221 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b079bdfb-7864-472c-af5f-05f2bde6998b","Type":"ContainerStarted","Data":"43d985225c897c59f456a99e061bf81c30c2a34af1363b01382923b65f34b89e"} Jan 31 06:03:42 crc kubenswrapper[4712]: I0131 06:03:42.999281 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b079bdfb-7864-472c-af5f-05f2bde6998b","Type":"ContainerStarted","Data":"50f3a772cda8e2ca05746479d9688ba2f8314bbfcc17712987b188afea3f96e5"} Jan 31 06:03:43 crc kubenswrapper[4712]: I0131 06:03:43.002076 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"57f643a8-d361-48e8-a982-df43b7a01b65","Type":"ContainerStarted","Data":"5e85318643e4f4c8317b2eb4076750900e2077075052a51df54abaf15003905a"} Jan 31 06:03:43 crc kubenswrapper[4712]: I0131 06:03:43.002234 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="57f643a8-d361-48e8-a982-df43b7a01b65" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5e85318643e4f4c8317b2eb4076750900e2077075052a51df54abaf15003905a" gracePeriod=30 Jan 31 06:03:43 crc kubenswrapper[4712]: I0131 06:03:43.021856 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.788868601 podStartE2EDuration="6.021634366s" podCreationTimestamp="2026-01-31 06:03:37 +0000 UTC" firstStartedPulling="2026-01-31 06:03:38.957834824 +0000 UTC m=+1485.051716665" lastFinishedPulling="2026-01-31 06:03:42.190600569 +0000 UTC m=+1488.284482430" observedRunningTime="2026-01-31 06:03:43.006652025 +0000 UTC m=+1489.100533866" watchObservedRunningTime="2026-01-31 06:03:43.021634366 +0000 UTC m=+1489.115516207" Jan 31 06:03:43 crc kubenswrapper[4712]: I0131 06:03:43.064956 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.081847716 podStartE2EDuration="6.064934009s" podCreationTimestamp="2026-01-31 06:03:37 +0000 UTC" firstStartedPulling="2026-01-31 06:03:38.214920819 +0000 UTC m=+1484.308802660" lastFinishedPulling="2026-01-31 06:03:42.198007112 +0000 UTC m=+1488.291888953" observedRunningTime="2026-01-31 06:03:43.061417142 +0000 UTC m=+1489.155298983" watchObservedRunningTime="2026-01-31 06:03:43.064934009 +0000 UTC m=+1489.158815850" Jan 31 06:03:43 crc kubenswrapper[4712]: I0131 06:03:43.066004 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.104306512 podStartE2EDuration="7.065998115s" podCreationTimestamp="2026-01-31 06:03:36 +0000 UTC" firstStartedPulling="2026-01-31 06:03:38.215586036 +0000 UTC m=+1484.309467877" lastFinishedPulling="2026-01-31 06:03:42.177277629 +0000 UTC m=+1488.271159480" observedRunningTime="2026-01-31 06:03:43.042703538 +0000 UTC m=+1489.136585379" watchObservedRunningTime="2026-01-31 06:03:43.065998115 +0000 UTC m=+1489.159879956" Jan 31 06:03:43 crc kubenswrapper[4712]: I0131 06:03:43.096687 4712 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" podStartSLOduration=6.096655525 podStartE2EDuration="6.096655525s" podCreationTimestamp="2026-01-31 06:03:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:03:43.091440305 +0000 UTC m=+1489.185322146" watchObservedRunningTime="2026-01-31 06:03:43.096655525 +0000 UTC m=+1489.190537366" Jan 31 06:03:43 crc kubenswrapper[4712]: I0131 06:03:43.140690 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.879542734 podStartE2EDuration="7.140670085s" podCreationTimestamp="2026-01-31 06:03:36 +0000 UTC" firstStartedPulling="2026-01-31 06:03:37.924227818 +0000 UTC m=+1484.018109659" lastFinishedPulling="2026-01-31 06:03:42.185355179 +0000 UTC m=+1488.279237010" observedRunningTime="2026-01-31 06:03:43.121736226 +0000 UTC m=+1489.215618067" watchObservedRunningTime="2026-01-31 06:03:43.140670085 +0000 UTC m=+1489.234551926" Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.017289 4712 generic.go:334] "Generic (PLEG): container finished" podID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerID="48f5d9c9abad68a23bac6c9b9067fb10f232a40ecdfa3f6d97f96521dd47425e" exitCode=0 Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.017765 4712 generic.go:334] "Generic (PLEG): container finished" podID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerID="8a587596cb89143712256533329f24a846fdb3366a076f683c69a04d7ec3ced5" exitCode=143 Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.017468 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b68b3b66-cd31-4701-965f-d1fb56854ee7","Type":"ContainerDied","Data":"48f5d9c9abad68a23bac6c9b9067fb10f232a40ecdfa3f6d97f96521dd47425e"} Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.018759 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b68b3b66-cd31-4701-965f-d1fb56854ee7","Type":"ContainerDied","Data":"8a587596cb89143712256533329f24a846fdb3366a076f683c69a04d7ec3ced5"} Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.426862 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.486721 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlnzm\" (UniqueName: \"kubernetes.io/projected/b68b3b66-cd31-4701-965f-d1fb56854ee7-kube-api-access-tlnzm\") pod \"b68b3b66-cd31-4701-965f-d1fb56854ee7\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.497720 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b68b3b66-cd31-4701-965f-d1fb56854ee7-kube-api-access-tlnzm" (OuterVolumeSpecName: "kube-api-access-tlnzm") pod "b68b3b66-cd31-4701-965f-d1fb56854ee7" (UID: "b68b3b66-cd31-4701-965f-d1fb56854ee7"). InnerVolumeSpecName "kube-api-access-tlnzm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.588687 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-combined-ca-bundle\") pod \"b68b3b66-cd31-4701-965f-d1fb56854ee7\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.588894 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-config-data\") pod \"b68b3b66-cd31-4701-965f-d1fb56854ee7\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.588916 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b68b3b66-cd31-4701-965f-d1fb56854ee7-logs\") pod \"b68b3b66-cd31-4701-965f-d1fb56854ee7\" (UID: \"b68b3b66-cd31-4701-965f-d1fb56854ee7\") " Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.589385 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlnzm\" (UniqueName: \"kubernetes.io/projected/b68b3b66-cd31-4701-965f-d1fb56854ee7-kube-api-access-tlnzm\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.589522 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b68b3b66-cd31-4701-965f-d1fb56854ee7-logs" (OuterVolumeSpecName: "logs") pod "b68b3b66-cd31-4701-965f-d1fb56854ee7" (UID: "b68b3b66-cd31-4701-965f-d1fb56854ee7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.616693 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-config-data" (OuterVolumeSpecName: "config-data") pod "b68b3b66-cd31-4701-965f-d1fb56854ee7" (UID: "b68b3b66-cd31-4701-965f-d1fb56854ee7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.620011 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b68b3b66-cd31-4701-965f-d1fb56854ee7" (UID: "b68b3b66-cd31-4701-965f-d1fb56854ee7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.691098 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.691140 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b68b3b66-cd31-4701-965f-d1fb56854ee7-logs\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:44 crc kubenswrapper[4712]: I0131 06:03:44.691152 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b68b3b66-cd31-4701-965f-d1fb56854ee7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.030658 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.039963 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b68b3b66-cd31-4701-965f-d1fb56854ee7","Type":"ContainerDied","Data":"dc83f1f1e8eafeb233d1f2058aa314e898dd5f1a028de3eaa57dc81eaea912cf"} Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.040020 4712 scope.go:117] "RemoveContainer" containerID="48f5d9c9abad68a23bac6c9b9067fb10f232a40ecdfa3f6d97f96521dd47425e" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.065053 4712 scope.go:117] "RemoveContainer" containerID="8a587596cb89143712256533329f24a846fdb3366a076f683c69a04d7ec3ced5" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.073685 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.087403 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.099959 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:03:45 crc kubenswrapper[4712]: E0131 06:03:45.100427 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerName="nova-metadata-metadata" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.100446 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerName="nova-metadata-metadata" Jan 31 06:03:45 crc kubenswrapper[4712]: E0131 06:03:45.100470 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerName="nova-metadata-log" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.100478 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerName="nova-metadata-log" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.100643 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerName="nova-metadata-log" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.100670 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b68b3b66-cd31-4701-965f-d1fb56854ee7" containerName="nova-metadata-metadata" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.101678 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.109803 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.110145 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.123310 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.208519 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.208626 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-config-data\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.208717 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.208739 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gvwj\" (UniqueName: \"kubernetes.io/projected/f66f9ae7-e3b2-4313-9020-1883f147c8c4-kube-api-access-2gvwj\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.208776 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f66f9ae7-e3b2-4313-9020-1883f147c8c4-logs\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.310843 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.311600 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-config-data\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.311723 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " 
pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.311747 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gvwj\" (UniqueName: \"kubernetes.io/projected/f66f9ae7-e3b2-4313-9020-1883f147c8c4-kube-api-access-2gvwj\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.311778 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f66f9ae7-e3b2-4313-9020-1883f147c8c4-logs\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.312131 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f66f9ae7-e3b2-4313-9020-1883f147c8c4-logs\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.317421 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-config-data\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.317443 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.330804 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.331706 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gvwj\" (UniqueName: \"kubernetes.io/projected/f66f9ae7-e3b2-4313-9020-1883f147c8c4-kube-api-access-2gvwj\") pod \"nova-metadata-0\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " pod="openstack/nova-metadata-0" Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.426563 4712 util.go:30] "No sandbox for pod can be found. 
Jan 31 06:03:45 crc kubenswrapper[4712]: I0131 06:03:45.871243 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 31 06:03:46 crc kubenswrapper[4712]: I0131 06:03:46.039583 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f66f9ae7-e3b2-4313-9020-1883f147c8c4","Type":"ContainerStarted","Data":"c164c756e23ef24ca1955ea223888b8ddac02cd21a3ed62bd9ea7680404dc9ff"}
Jan 31 06:03:46 crc kubenswrapper[4712]: I0131 06:03:46.518312 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b68b3b66-cd31-4701-965f-d1fb56854ee7" path="/var/lib/kubelet/pods/b68b3b66-cd31-4701-965f-d1fb56854ee7/volumes"
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.050106 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f66f9ae7-e3b2-4313-9020-1883f147c8c4","Type":"ContainerStarted","Data":"bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9"}
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.050156 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f66f9ae7-e3b2-4313-9020-1883f147c8c4","Type":"ContainerStarted","Data":"74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f"}
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.082742 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.082718412 podStartE2EDuration="2.082718412s" podCreationTimestamp="2026-01-31 06:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:03:47.070701734 +0000 UTC m=+1493.164583565" watchObservedRunningTime="2026-01-31 06:03:47.082718412 +0000 UTC m=+1493.176600273"
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.271365 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.271494 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.412077 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.412141 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.459539 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.460821 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.615317 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8"
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.674227 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66bccdcd7c-pgqbm"]
Jan 31 06:03:47 crc kubenswrapper[4712]: I0131 06:03:47.674470 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" podUID="f5da79de-08a9-473b-ad68-846cb4403a85" containerName="dnsmasq-dns" containerID="cri-o://e37f59ccedffffa7f93fd2a9947cc705171929dc3e78d65ce03450efcd1d911d" gracePeriod=10
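[Editor's note] The pod_startup_latency_tracker entry above is internally consistent: the logged podStartSLOduration (2.082718412s) is exactly watchObservedRunningTime (06:03:47.082718412) minus podCreationTimestamp (06:03:45), both quoted from the log. A minimal arithmetic check in Go, using only timestamps taken from the entry:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matches the "2026-01-31 06:03:45 +0000 UTC" form used in the log.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2026-01-31 06:03:45 +0000 UTC")
	watched, _ := time.Parse(layout, "2026-01-31 06:03:47.082718412 +0000 UTC")
	fmt.Println(watched.Sub(created)) // prints 2.082718412s
}
```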
containerID="cri-o://e37f59ccedffffa7f93fd2a9947cc705171929dc3e78d65ce03450efcd1d911d" gracePeriod=10 Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.063957 4712 generic.go:334] "Generic (PLEG): container finished" podID="f5da79de-08a9-473b-ad68-846cb4403a85" containerID="e37f59ccedffffa7f93fd2a9947cc705171929dc3e78d65ce03450efcd1d911d" exitCode=0 Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.064036 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" event={"ID":"f5da79de-08a9-473b-ad68-846cb4403a85","Type":"ContainerDied","Data":"e37f59ccedffffa7f93fd2a9947cc705171929dc3e78d65ce03450efcd1d911d"} Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.066137 4712 generic.go:334] "Generic (PLEG): container finished" podID="bb6bb4ae-9226-40ca-8117-3b62a4b91261" containerID="f63a07224856ba1ec27e2852b839aa1d3077fe9b49c33f31462652de2df3864b" exitCode=0 Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.067283 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jpj94" event={"ID":"bb6bb4ae-9226-40ca-8117-3b62a4b91261","Type":"ContainerDied","Data":"f63a07224856ba1ec27e2852b839aa1d3077fe9b49c33f31462652de2df3864b"} Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.107029 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.354367 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.354392 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.355927 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.391848 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhltd\" (UniqueName: \"kubernetes.io/projected/f5da79de-08a9-473b-ad68-846cb4403a85-kube-api-access-dhltd\") pod \"f5da79de-08a9-473b-ad68-846cb4403a85\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") "
Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.392065 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-nb\") pod \"f5da79de-08a9-473b-ad68-846cb4403a85\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") "
Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.392117 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-sb\") pod \"f5da79de-08a9-473b-ad68-846cb4403a85\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") "
Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.392151 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-swift-storage-0\") pod \"f5da79de-08a9-473b-ad68-846cb4403a85\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") "
Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.392226 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-svc\") pod \"f5da79de-08a9-473b-ad68-846cb4403a85\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") "
Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.392279 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-config\") pod \"f5da79de-08a9-473b-ad68-846cb4403a85\" (UID: \"f5da79de-08a9-473b-ad68-846cb4403a85\") "
Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.448738 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5da79de-08a9-473b-ad68-846cb4403a85-kube-api-access-dhltd" (OuterVolumeSpecName: "kube-api-access-dhltd") pod "f5da79de-08a9-473b-ad68-846cb4403a85" (UID: "f5da79de-08a9-473b-ad68-846cb4403a85"). InnerVolumeSpecName "kube-api-access-dhltd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.468995 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-config" (OuterVolumeSpecName: "config") pod "f5da79de-08a9-473b-ad68-846cb4403a85" (UID: "f5da79de-08a9-473b-ad68-846cb4403a85"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.495826 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.495856 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhltd\" (UniqueName: \"kubernetes.io/projected/f5da79de-08a9-473b-ad68-846cb4403a85-kube-api-access-dhltd\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.502711 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f5da79de-08a9-473b-ad68-846cb4403a85" (UID: "f5da79de-08a9-473b-ad68-846cb4403a85"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.502841 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f5da79de-08a9-473b-ad68-846cb4403a85" (UID: "f5da79de-08a9-473b-ad68-846cb4403a85"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.503087 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f5da79de-08a9-473b-ad68-846cb4403a85" (UID: "f5da79de-08a9-473b-ad68-846cb4403a85"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.503097 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f5da79de-08a9-473b-ad68-846cb4403a85" (UID: "f5da79de-08a9-473b-ad68-846cb4403a85"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.597610 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.597648 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.597660 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:48 crc kubenswrapper[4712]: I0131 06:03:48.597674 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5da79de-08a9-473b-ad68-846cb4403a85-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.077318 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.077773 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66bccdcd7c-pgqbm" event={"ID":"f5da79de-08a9-473b-ad68-846cb4403a85","Type":"ContainerDied","Data":"1dc48d6d49a65f3f180684236133f168a550f5481d014999a0d356f95aba04fe"}
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.077806 4712 scope.go:117] "RemoveContainer" containerID="e37f59ccedffffa7f93fd2a9947cc705171929dc3e78d65ce03450efcd1d911d"
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.148571 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66bccdcd7c-pgqbm"]
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.152316 4712 scope.go:117] "RemoveContainer" containerID="b58abf8caf9af39c62cfa3f55e85b3569dce310107d1609ad07a24e5650ca846"
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.198522 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66bccdcd7c-pgqbm"]
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.689614 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jpj94"
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.825562 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-combined-ca-bundle\") pod \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") "
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.825745 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-config-data\") pod \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") "
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.825800 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wchf9\" (UniqueName: \"kubernetes.io/projected/bb6bb4ae-9226-40ca-8117-3b62a4b91261-kube-api-access-wchf9\") pod \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") "
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.825899 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-scripts\") pod \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\" (UID: \"bb6bb4ae-9226-40ca-8117-3b62a4b91261\") "
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.831701 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb6bb4ae-9226-40ca-8117-3b62a4b91261-kube-api-access-wchf9" (OuterVolumeSpecName: "kube-api-access-wchf9") pod "bb6bb4ae-9226-40ca-8117-3b62a4b91261" (UID: "bb6bb4ae-9226-40ca-8117-3b62a4b91261"). InnerVolumeSpecName "kube-api-access-wchf9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.845232 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-scripts" (OuterVolumeSpecName: "scripts") pod "bb6bb4ae-9226-40ca-8117-3b62a4b91261" (UID: "bb6bb4ae-9226-40ca-8117-3b62a4b91261"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.858001 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bb6bb4ae-9226-40ca-8117-3b62a4b91261" (UID: "bb6bb4ae-9226-40ca-8117-3b62a4b91261"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.863814 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-config-data" (OuterVolumeSpecName: "config-data") pod "bb6bb4ae-9226-40ca-8117-3b62a4b91261" (UID: "bb6bb4ae-9226-40ca-8117-3b62a4b91261"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.928396 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wchf9\" (UniqueName: \"kubernetes.io/projected/bb6bb4ae-9226-40ca-8117-3b62a4b91261-kube-api-access-wchf9\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.928429 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.928441 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:49 crc kubenswrapper[4712]: I0131 06:03:49.928451 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb6bb4ae-9226-40ca-8117-3b62a4b91261-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.088504 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jpj94" event={"ID":"bb6bb4ae-9226-40ca-8117-3b62a4b91261","Type":"ContainerDied","Data":"284d9650d9ab03bd36bddf3ab635d2e58caf4fa6404371e7533ad2d35cee2247"} Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.088546 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="284d9650d9ab03bd36bddf3ab635d2e58caf4fa6404371e7533ad2d35cee2247" Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.088613 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.218767 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.219065 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-log" containerID="cri-o://50f3a772cda8e2ca05746479d9688ba2f8314bbfcc17712987b188afea3f96e5" gracePeriod=30
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.219152 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-api" containerID="cri-o://43d985225c897c59f456a99e061bf81c30c2a34af1363b01382923b65f34b89e" gracePeriod=30
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.229707 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.229902 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0b152827-c2b8-4a87-96ef-e4ec84e4265a" containerName="nova-scheduler-scheduler" containerID="cri-o://e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f" gracePeriod=30
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.256715 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.256959 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerName="nova-metadata-log" containerID="cri-o://74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f" gracePeriod=30
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.257104 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerName="nova-metadata-metadata" containerID="cri-o://bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9" gracePeriod=30
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.427783 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.427928 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.518612 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5da79de-08a9-473b-ad68-846cb4403a85" path="/var/lib/kubelet/pods/f5da79de-08a9-473b-ad68-846cb4403a85/volumes"
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.823793 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
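[Editor's note] The "Killing container with a grace period" entries above deliver SIGTERM and allow up to gracePeriod seconds (here 30) before SIGKILL. The exitCode=143 values that appear shortly after in this log are 128+15, i.e. the process died on an unhandled SIGTERM, while exitCode=0 means the process shut itself down cleanly. A minimal Go sketch of catching the signal inside the grace period (stdlib only, not taken from this log):

```go
package main

import (
	"context"
	"fmt"
	"os/signal"
	"syscall"
)

func main() {
	// ctx is cancelled when kubelet's SIGTERM arrives; SIGKILL follows
	// after the grace period if the process has not exited by then.
	ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM)
	defer stop()

	<-ctx.Done()
	fmt.Println("draining and exiting before the grace period expires")
	// Exiting here yields exitCode=0; ignoring the signal yields 143.
}
```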
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.959909 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-nova-metadata-tls-certs\") pod \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") "
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.960029 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-combined-ca-bundle\") pod \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") "
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.960092 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-config-data\") pod \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") "
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.960234 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gvwj\" (UniqueName: \"kubernetes.io/projected/f66f9ae7-e3b2-4313-9020-1883f147c8c4-kube-api-access-2gvwj\") pod \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") "
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.960384 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f66f9ae7-e3b2-4313-9020-1883f147c8c4-logs\") pod \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") "
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.972264 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f66f9ae7-e3b2-4313-9020-1883f147c8c4-logs" (OuterVolumeSpecName: "logs") pod "f66f9ae7-e3b2-4313-9020-1883f147c8c4" (UID: "f66f9ae7-e3b2-4313-9020-1883f147c8c4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:03:50 crc kubenswrapper[4712]: I0131 06:03:50.982693 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f66f9ae7-e3b2-4313-9020-1883f147c8c4-kube-api-access-2gvwj" (OuterVolumeSpecName: "kube-api-access-2gvwj") pod "f66f9ae7-e3b2-4313-9020-1883f147c8c4" (UID: "f66f9ae7-e3b2-4313-9020-1883f147c8c4"). InnerVolumeSpecName "kube-api-access-2gvwj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.005689 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f66f9ae7-e3b2-4313-9020-1883f147c8c4" (UID: "f66f9ae7-e3b2-4313-9020-1883f147c8c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.021742 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-config-data" (OuterVolumeSpecName: "config-data") pod "f66f9ae7-e3b2-4313-9020-1883f147c8c4" (UID: "f66f9ae7-e3b2-4313-9020-1883f147c8c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.062283 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "f66f9ae7-e3b2-4313-9020-1883f147c8c4" (UID: "f66f9ae7-e3b2-4313-9020-1883f147c8c4"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.062456 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-nova-metadata-tls-certs\") pod \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\" (UID: \"f66f9ae7-e3b2-4313-9020-1883f147c8c4\") " Jan 31 06:03:51 crc kubenswrapper[4712]: W0131 06:03:51.062566 4712 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/f66f9ae7-e3b2-4313-9020-1883f147c8c4/volumes/kubernetes.io~secret/nova-metadata-tls-certs Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.062584 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "f66f9ae7-e3b2-4313-9020-1883f147c8c4" (UID: "f66f9ae7-e3b2-4313-9020-1883f147c8c4"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.063232 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f66f9ae7-e3b2-4313-9020-1883f147c8c4-logs\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.063257 4712 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.063271 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.063281 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f66f9ae7-e3b2-4313-9020-1883f147c8c4-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.063291 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gvwj\" (UniqueName: \"kubernetes.io/projected/f66f9ae7-e3b2-4313-9020-1883f147c8c4-kube-api-access-2gvwj\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.106269 4712 generic.go:334] "Generic (PLEG): container finished" podID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerID="bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9" exitCode=0 Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.106323 4712 generic.go:334] "Generic (PLEG): container finished" podID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerID="74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f" exitCode=143 Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.106332 4712 util.go:48] "No ready 
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.106437 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f66f9ae7-e3b2-4313-9020-1883f147c8c4","Type":"ContainerDied","Data":"bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9"}
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.106477 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f66f9ae7-e3b2-4313-9020-1883f147c8c4","Type":"ContainerDied","Data":"74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f"}
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.106490 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f66f9ae7-e3b2-4313-9020-1883f147c8c4","Type":"ContainerDied","Data":"c164c756e23ef24ca1955ea223888b8ddac02cd21a3ed62bd9ea7680404dc9ff"}
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.106510 4712 scope.go:117] "RemoveContainer" containerID="bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.111681 4712 generic.go:334] "Generic (PLEG): container finished" podID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerID="50f3a772cda8e2ca05746479d9688ba2f8314bbfcc17712987b188afea3f96e5" exitCode=143
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.111726 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b079bdfb-7864-472c-af5f-05f2bde6998b","Type":"ContainerDied","Data":"50f3a772cda8e2ca05746479d9688ba2f8314bbfcc17712987b188afea3f96e5"}
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.151451 4712 scope.go:117] "RemoveContainer" containerID="74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.164375 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.197579 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208037 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 31 06:03:51 crc kubenswrapper[4712]: E0131 06:03:51.208519 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb6bb4ae-9226-40ca-8117-3b62a4b91261" containerName="nova-manage"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208544 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb6bb4ae-9226-40ca-8117-3b62a4b91261" containerName="nova-manage"
Jan 31 06:03:51 crc kubenswrapper[4712]: E0131 06:03:51.208566 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5da79de-08a9-473b-ad68-846cb4403a85" containerName="init"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208576 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5da79de-08a9-473b-ad68-846cb4403a85" containerName="init"
Jan 31 06:03:51 crc kubenswrapper[4712]: E0131 06:03:51.208596 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerName="nova-metadata-log"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208604 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerName="nova-metadata-log"
Jan 31 06:03:51 crc kubenswrapper[4712]: E0131 06:03:51.208625 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerName="nova-metadata-metadata"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208634 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerName="nova-metadata-metadata"
Jan 31 06:03:51 crc kubenswrapper[4712]: E0131 06:03:51.208653 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5da79de-08a9-473b-ad68-846cb4403a85" containerName="dnsmasq-dns"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208661 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5da79de-08a9-473b-ad68-846cb4403a85" containerName="dnsmasq-dns"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208880 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb6bb4ae-9226-40ca-8117-3b62a4b91261" containerName="nova-manage"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208902 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerName="nova-metadata-log"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208915 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" containerName="nova-metadata-metadata"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.208937 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5da79de-08a9-473b-ad68-846cb4403a85" containerName="dnsmasq-dns"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.210069 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.212881 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.214295 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.227207 4712 scope.go:117] "RemoveContainer" containerID="bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9"
Jan 31 06:03:51 crc kubenswrapper[4712]: E0131 06:03:51.227718 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9\": container with ID starting with bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9 not found: ID does not exist" containerID="bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.227752 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9"} err="failed to get container status \"bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9\": rpc error: code = NotFound desc = could not find container \"bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9\": container with ID starting with bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9 not found: ID does not exist"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.227777 4712 scope.go:117] "RemoveContainer" containerID="74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f"
Jan 31 06:03:51 crc kubenswrapper[4712]: E0131 06:03:51.230294 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f\": container with ID starting with 74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f not found: ID does not exist" containerID="74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.230342 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f"} err="failed to get container status \"74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f\": rpc error: code = NotFound desc = could not find container \"74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f\": container with ID starting with 74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f not found: ID does not exist"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.230373 4712 scope.go:117] "RemoveContainer" containerID="bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.231100 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9"} err="failed to get container status \"bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9\": rpc error: code = NotFound desc = could not find container \"bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9\": container with ID starting with bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9 not found: ID does not exist"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.231135 4712 scope.go:117] "RemoveContainer" containerID="74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.231426 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f"} err="failed to get container status \"74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f\": rpc error: code = NotFound desc = could not find container \"74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f\": container with ID starting with 74755e4f0e20722c0871254de91028c7c1ede4db4c56998b00eab259b019919f not found: ID does not exist"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.240393 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 31 06:03:51 crc kubenswrapper[4712]: E0131 06:03:51.274581 4712 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf66f9ae7_e3b2_4313_9020_1883f147c8c4.slice/crio-c164c756e23ef24ca1955ea223888b8ddac02cd21a3ed62bd9ea7680404dc9ff\": RecentStats: unable to find data in memory cache]"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.371896 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.372216 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
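[Editor's note] The repeated RemoveContainer / "ContainerStatus from runtime service failed ... NotFound" pairs above are benign: the containers were already removed, and each retry gets NotFound back from the CRI runtime until the kubelet's bookkeeping catches up. A sketch of the same tolerance for gRPC NotFound errors; removeContainer is a hypothetical stand-in for a CRI client call, only the error-handling pattern is the point:

```go
package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer simulates a CRI RemoveContainer call against a
// container that is already gone.
func removeContainer(ctx context.Context, id string) error {
	return status.Error(codes.NotFound, "could not find container "+id)
}

func main() {
	id := "bbd2d41714c50b387c023b442fce4e499af88186e3f5a67c449fc735fb9bdff9"
	err := removeContainer(context.Background(), id)
	if status.Code(err) == codes.NotFound {
		// Same outcome as in the log: "already gone" is treated as success.
		fmt.Println("container already removed:", id)
		return
	}
	if err != nil {
		fmt.Println("remove failed:", err)
	}
}
```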
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.372329 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv2g9\" (UniqueName: \"kubernetes.io/projected/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-kube-api-access-cv2g9\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.372482 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-logs\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.372601 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-config-data\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.474775 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-config-data\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.475304 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.475375 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.475459 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv2g9\" (UniqueName: \"kubernetes.io/projected/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-kube-api-access-cv2g9\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.475608 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-logs\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.476159 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-logs\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0"
(UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.479948 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.480823 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.481038 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-config-data\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.496455 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv2g9\" (UniqueName: \"kubernetes.io/projected/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-kube-api-access-cv2g9\") pod \"nova-metadata-0\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " pod="openstack/nova-metadata-0" Jan 31 06:03:51 crc kubenswrapper[4712]: I0131 06:03:51.534599 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:03:52 crc kubenswrapper[4712]: I0131 06:03:52.017806 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:03:52 crc kubenswrapper[4712]: I0131 06:03:52.122566 4712 generic.go:334] "Generic (PLEG): container finished" podID="d484669e-304d-4389-b015-9479aadf2675" containerID="3681a2c1b37e8bd0ff64b91199e6c2c709faee255dc78a48536bf8904eb1a194" exitCode=0 Jan 31 06:03:52 crc kubenswrapper[4712]: I0131 06:03:52.122628 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kzkg5" event={"ID":"d484669e-304d-4389-b015-9479aadf2675","Type":"ContainerDied","Data":"3681a2c1b37e8bd0ff64b91199e6c2c709faee255dc78a48536bf8904eb1a194"} Jan 31 06:03:52 crc kubenswrapper[4712]: I0131 06:03:52.124151 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2ccae9d-4c44-4ca8-96ed-44d05d37790f","Type":"ContainerStarted","Data":"d2a0466c6e84e04ea5287cebf7e1550b1428f58ad56fb553a6975aef69762093"} Jan 31 06:03:52 crc kubenswrapper[4712]: E0131 06:03:52.414331 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 31 06:03:52 crc kubenswrapper[4712]: E0131 06:03:52.416295 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 31 06:03:52 crc kubenswrapper[4712]: 
Jan 31 06:03:52 crc kubenswrapper[4712]: E0131 06:03:52.417712 4712 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="0b152827-c2b8-4a87-96ef-e4ec84e4265a" containerName="nova-scheduler-scheduler"
Jan 31 06:03:52 crc kubenswrapper[4712]: I0131 06:03:52.518737 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f66f9ae7-e3b2-4313-9020-1883f147c8c4" path="/var/lib/kubelet/pods/f66f9ae7-e3b2-4313-9020-1883f147c8c4/volumes"
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.136217 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2ccae9d-4c44-4ca8-96ed-44d05d37790f","Type":"ContainerStarted","Data":"6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7"}
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.136265 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2ccae9d-4c44-4ca8-96ed-44d05d37790f","Type":"ContainerStarted","Data":"e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281"}
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.169653 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.169635453 podStartE2EDuration="2.169635453s" podCreationTimestamp="2026-01-31 06:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:03:53.159211325 +0000 UTC m=+1499.253093206" watchObservedRunningTime="2026-01-31 06:03:53.169635453 +0000 UTC m=+1499.263517294"
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.505402 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kzkg5"
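[Editor's note] The ExecSync failures above come from an exec-based readiness probe that runs /usr/bin/pgrep -r DRST nova-scheduler inside the container; the exec cannot be registered because the container is already stopping under its deletion grace period. A minimal sketch of how such a probe is declared, using the command quoted in the errors (TimeoutSeconds is an assumption; the pod spec is not part of this log):

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// readinessProbe mirrors the cmd=[...] quoted in the ExecSync errors above.
func readinessProbe() *corev1.Probe {
	return &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			Exec: &corev1.ExecAction{
				Command: []string{"/usr/bin/pgrep", "-r", "DRST", "nova-scheduler"},
			},
		},
		TimeoutSeconds: 5, // assumed value
	}
}

func main() {
	fmt.Println(readinessProbe().Exec.Command)
}
```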
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.619805 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-config-data\") pod \"d484669e-304d-4389-b015-9479aadf2675\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") "
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.619921 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-combined-ca-bundle\") pod \"d484669e-304d-4389-b015-9479aadf2675\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") "
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.619957 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-scripts\") pod \"d484669e-304d-4389-b015-9479aadf2675\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") "
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.619989 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfplt\" (UniqueName: \"kubernetes.io/projected/d484669e-304d-4389-b015-9479aadf2675-kube-api-access-tfplt\") pod \"d484669e-304d-4389-b015-9479aadf2675\" (UID: \"d484669e-304d-4389-b015-9479aadf2675\") "
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.624310 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-scripts" (OuterVolumeSpecName: "scripts") pod "d484669e-304d-4389-b015-9479aadf2675" (UID: "d484669e-304d-4389-b015-9479aadf2675"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.624404 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d484669e-304d-4389-b015-9479aadf2675-kube-api-access-tfplt" (OuterVolumeSpecName: "kube-api-access-tfplt") pod "d484669e-304d-4389-b015-9479aadf2675" (UID: "d484669e-304d-4389-b015-9479aadf2675"). InnerVolumeSpecName "kube-api-access-tfplt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.646412 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-config-data" (OuterVolumeSpecName: "config-data") pod "d484669e-304d-4389-b015-9479aadf2675" (UID: "d484669e-304d-4389-b015-9479aadf2675"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.647964 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d484669e-304d-4389-b015-9479aadf2675" (UID: "d484669e-304d-4389-b015-9479aadf2675"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.722736 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.723073 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.723209 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d484669e-304d-4389-b015-9479aadf2675-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:53 crc kubenswrapper[4712]: I0131 06:03:53.723298 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfplt\" (UniqueName: \"kubernetes.io/projected/d484669e-304d-4389-b015-9479aadf2675-kube-api-access-tfplt\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.146123 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-kzkg5" event={"ID":"d484669e-304d-4389-b015-9479aadf2675","Type":"ContainerDied","Data":"9118d68e42217c9b7076037f3471341a63d5379a6042cb614dbc2c23fcb9ee14"} Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.146206 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9118d68e42217c9b7076037f3471341a63d5379a6042cb614dbc2c23fcb9ee14" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.146167 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-kzkg5" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.227912 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 31 06:03:54 crc kubenswrapper[4712]: E0131 06:03:54.230989 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d484669e-304d-4389-b015-9479aadf2675" containerName="nova-cell1-conductor-db-sync" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.231026 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d484669e-304d-4389-b015-9479aadf2675" containerName="nova-cell1-conductor-db-sync" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.231381 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d484669e-304d-4389-b015-9479aadf2675" containerName="nova-cell1-conductor-db-sync" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.232449 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.234502 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.243712 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.335557 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2tbn\" (UniqueName: \"kubernetes.io/projected/7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7-kube-api-access-z2tbn\") pod \"nova-cell1-conductor-0\" (UID: \"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7\") " pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.335634 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7\") " pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.335668 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7\") " pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.437674 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7\") " pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.437839 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2tbn\" (UniqueName: \"kubernetes.io/projected/7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7-kube-api-access-z2tbn\") pod \"nova-cell1-conductor-0\" (UID: \"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7\") " pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.437885 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7\") " pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.442871 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7\") " pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.449837 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7\") " pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.455613 4712 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2tbn\" (UniqueName: \"kubernetes.io/projected/7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7-kube-api-access-z2tbn\") pod \"nova-cell1-conductor-0\" (UID: \"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7\") " pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:54 crc kubenswrapper[4712]: I0131 06:03:54.549432 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:55 crc kubenswrapper[4712]: I0131 06:03:55.012730 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 31 06:03:55 crc kubenswrapper[4712]: I0131 06:03:55.158346 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7","Type":"ContainerStarted","Data":"c6d3366cea72014ee586f8c4947db3fe8cf47834e055f322dfd2fa6994a94f45"} Jan 31 06:03:55 crc kubenswrapper[4712]: I0131 06:03:55.161072 4712 generic.go:334] "Generic (PLEG): container finished" podID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerID="43d985225c897c59f456a99e061bf81c30c2a34af1363b01382923b65f34b89e" exitCode=0 Jan 31 06:03:55 crc kubenswrapper[4712]: I0131 06:03:55.161119 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b079bdfb-7864-472c-af5f-05f2bde6998b","Type":"ContainerDied","Data":"43d985225c897c59f456a99e061bf81c30c2a34af1363b01382923b65f34b89e"} Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.185578 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7","Type":"ContainerStarted","Data":"2bf5ed90ed52e71d8e513d4aff99e2170eed6e12f8631d2ca8ef03940daf27ae"} Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.185932 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.221574 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.221525758 podStartE2EDuration="2.221525758s" podCreationTimestamp="2026-01-31 06:03:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:03:56.204401814 +0000 UTC m=+1502.298283675" watchObservedRunningTime="2026-01-31 06:03:56.221525758 +0000 UTC m=+1502.315407609" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.536341 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.538608 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.561045 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.685404 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnhjp\" (UniqueName: \"kubernetes.io/projected/b079bdfb-7864-472c-af5f-05f2bde6998b-kube-api-access-qnhjp\") pod \"b079bdfb-7864-472c-af5f-05f2bde6998b\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.685454 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-combined-ca-bundle\") pod \"b079bdfb-7864-472c-af5f-05f2bde6998b\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.685511 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-config-data\") pod \"b079bdfb-7864-472c-af5f-05f2bde6998b\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.685637 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b079bdfb-7864-472c-af5f-05f2bde6998b-logs\") pod \"b079bdfb-7864-472c-af5f-05f2bde6998b\" (UID: \"b079bdfb-7864-472c-af5f-05f2bde6998b\") " Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.686387 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b079bdfb-7864-472c-af5f-05f2bde6998b-logs" (OuterVolumeSpecName: "logs") pod "b079bdfb-7864-472c-af5f-05f2bde6998b" (UID: "b079bdfb-7864-472c-af5f-05f2bde6998b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.686979 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b079bdfb-7864-472c-af5f-05f2bde6998b-logs\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.690742 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b079bdfb-7864-472c-af5f-05f2bde6998b-kube-api-access-qnhjp" (OuterVolumeSpecName: "kube-api-access-qnhjp") pod "b079bdfb-7864-472c-af5f-05f2bde6998b" (UID: "b079bdfb-7864-472c-af5f-05f2bde6998b"). InnerVolumeSpecName "kube-api-access-qnhjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.720543 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-config-data" (OuterVolumeSpecName: "config-data") pod "b079bdfb-7864-472c-af5f-05f2bde6998b" (UID: "b079bdfb-7864-472c-af5f-05f2bde6998b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.746680 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b079bdfb-7864-472c-af5f-05f2bde6998b" (UID: "b079bdfb-7864-472c-af5f-05f2bde6998b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.789488 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnhjp\" (UniqueName: \"kubernetes.io/projected/b079bdfb-7864-472c-af5f-05f2bde6998b-kube-api-access-qnhjp\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.789528 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:56 crc kubenswrapper[4712]: I0131 06:03:56.800695 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b079bdfb-7864-472c-af5f-05f2bde6998b-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.195511 4712 generic.go:334] "Generic (PLEG): container finished" podID="0b152827-c2b8-4a87-96ef-e4ec84e4265a" containerID="e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f" exitCode=0 Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.195584 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0b152827-c2b8-4a87-96ef-e4ec84e4265a","Type":"ContainerDied","Data":"e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f"} Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.198248 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b079bdfb-7864-472c-af5f-05f2bde6998b","Type":"ContainerDied","Data":"15b4e89e636f9fd03c4b216badfe9492bb3087897e7dba73dbdc6c08c48ed44c"} Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.198331 4712 scope.go:117] "RemoveContainer" containerID="43d985225c897c59f456a99e061bf81c30c2a34af1363b01382923b65f34b89e" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.199713 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.222992 4712 scope.go:117] "RemoveContainer" containerID="50f3a772cda8e2ca05746479d9688ba2f8314bbfcc17712987b188afea3f96e5" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.261662 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.288455 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.303369 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 31 06:03:57 crc kubenswrapper[4712]: E0131 06:03:57.303914 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-log" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.303931 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-log" Jan 31 06:03:57 crc kubenswrapper[4712]: E0131 06:03:57.303976 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-api" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.303985 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-api" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.304314 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-log" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.304335 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" containerName="nova-api-api" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.305537 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.305615 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.319357 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 31 06:03:57 crc kubenswrapper[4712]: E0131 06:03:57.412484 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f is running failed: container process not found" containerID="e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 31 06:03:57 crc kubenswrapper[4712]: E0131 06:03:57.413132 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f is running failed: container process not found" containerID="e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 31 06:03:57 crc kubenswrapper[4712]: E0131 06:03:57.413380 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f is running failed: container process not found" containerID="e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 31 06:03:57 crc kubenswrapper[4712]: E0131 06:03:57.413473 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="0b152827-c2b8-4a87-96ef-e4ec84e4265a" containerName="nova-scheduler-scheduler" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.420566 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.420669 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-config-data\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.420720 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-logs\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.420772 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzhrg\" (UniqueName: \"kubernetes.io/projected/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-kube-api-access-kzhrg\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc 
kubenswrapper[4712]: I0131 06:03:57.522956 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-logs\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.523051 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzhrg\" (UniqueName: \"kubernetes.io/projected/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-kube-api-access-kzhrg\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.523127 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.523207 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-config-data\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.525265 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-logs\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.530129 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-config-data\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.538677 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.541942 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzhrg\" (UniqueName: \"kubernetes.io/projected/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-kube-api-access-kzhrg\") pod \"nova-api-0\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.646459 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.765706 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.932887 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-config-data\") pod \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.933329 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-combined-ca-bundle\") pod \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.933390 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dv4t\" (UniqueName: \"kubernetes.io/projected/0b152827-c2b8-4a87-96ef-e4ec84e4265a-kube-api-access-6dv4t\") pod \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\" (UID: \"0b152827-c2b8-4a87-96ef-e4ec84e4265a\") " Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.941953 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b152827-c2b8-4a87-96ef-e4ec84e4265a-kube-api-access-6dv4t" (OuterVolumeSpecName: "kube-api-access-6dv4t") pod "0b152827-c2b8-4a87-96ef-e4ec84e4265a" (UID: "0b152827-c2b8-4a87-96ef-e4ec84e4265a"). InnerVolumeSpecName "kube-api-access-6dv4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.962892 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b152827-c2b8-4a87-96ef-e4ec84e4265a" (UID: "0b152827-c2b8-4a87-96ef-e4ec84e4265a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:57 crc kubenswrapper[4712]: I0131 06:03:57.963395 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-config-data" (OuterVolumeSpecName: "config-data") pod "0b152827-c2b8-4a87-96ef-e4ec84e4265a" (UID: "0b152827-c2b8-4a87-96ef-e4ec84e4265a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.036077 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.036115 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dv4t\" (UniqueName: \"kubernetes.io/projected/0b152827-c2b8-4a87-96ef-e4ec84e4265a-kube-api-access-6dv4t\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.036125 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b152827-c2b8-4a87-96ef-e4ec84e4265a-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.168264 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.208001 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0b152827-c2b8-4a87-96ef-e4ec84e4265a","Type":"ContainerDied","Data":"8c8470ebaae6163953c92822bb9a7decd1063d78d0ba76a6fe8ed59d8f71ae42"} Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.209207 4712 scope.go:117] "RemoveContainer" containerID="e3603a6f746d3fc4b38c7b551a5d9b1314b599b89b09ab063b0498c00f754a0f" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.208088 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.225187 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308","Type":"ContainerStarted","Data":"185c095f20f17f6f440dfb9bb2b94c5337a457952de2563cfc5f60fe3df27e69"} Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.265156 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.287952 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.300932 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:03:58 crc kubenswrapper[4712]: E0131 06:03:58.301598 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b152827-c2b8-4a87-96ef-e4ec84e4265a" containerName="nova-scheduler-scheduler" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.301615 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b152827-c2b8-4a87-96ef-e4ec84e4265a" containerName="nova-scheduler-scheduler" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.301835 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b152827-c2b8-4a87-96ef-e4ec84e4265a" containerName="nova-scheduler-scheduler" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.302724 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.306545 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.311307 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.445636 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-config-data\") pod \"nova-scheduler-0\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.446014 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg9fv\" (UniqueName: \"kubernetes.io/projected/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-kube-api-access-fg9fv\") pod \"nova-scheduler-0\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.446165 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.519422 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b152827-c2b8-4a87-96ef-e4ec84e4265a" path="/var/lib/kubelet/pods/0b152827-c2b8-4a87-96ef-e4ec84e4265a/volumes" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.520104 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b079bdfb-7864-472c-af5f-05f2bde6998b" path="/var/lib/kubelet/pods/b079bdfb-7864-472c-af5f-05f2bde6998b/volumes" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.547627 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-config-data\") pod \"nova-scheduler-0\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.547699 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg9fv\" (UniqueName: \"kubernetes.io/projected/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-kube-api-access-fg9fv\") pod \"nova-scheduler-0\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.547787 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.552351 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-config-data\") pod \"nova-scheduler-0\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 
06:03:58.553010 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.568707 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg9fv\" (UniqueName: \"kubernetes.io/projected/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-kube-api-access-fg9fv\") pod \"nova-scheduler-0\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " pod="openstack/nova-scheduler-0" Jan 31 06:03:58 crc kubenswrapper[4712]: I0131 06:03:58.635391 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:03:59 crc kubenswrapper[4712]: I0131 06:03:59.139076 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:03:59 crc kubenswrapper[4712]: W0131 06:03:59.141181 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2af9dbf_e8b2_4236_bbef_8bb4467e60a1.slice/crio-bb27a85c275f2cb75541786d8f4d8830cea52bd7261fb7ccd0d37c4b9cc83bee WatchSource:0}: Error finding container bb27a85c275f2cb75541786d8f4d8830cea52bd7261fb7ccd0d37c4b9cc83bee: Status 404 returned error can't find the container with id bb27a85c275f2cb75541786d8f4d8830cea52bd7261fb7ccd0d37c4b9cc83bee Jan 31 06:03:59 crc kubenswrapper[4712]: I0131 06:03:59.252883 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1","Type":"ContainerStarted","Data":"bb27a85c275f2cb75541786d8f4d8830cea52bd7261fb7ccd0d37c4b9cc83bee"} Jan 31 06:03:59 crc kubenswrapper[4712]: I0131 06:03:59.256307 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308","Type":"ContainerStarted","Data":"aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006"} Jan 31 06:03:59 crc kubenswrapper[4712]: I0131 06:03:59.256354 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308","Type":"ContainerStarted","Data":"796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda"} Jan 31 06:03:59 crc kubenswrapper[4712]: I0131 06:03:59.286556 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.286532967 podStartE2EDuration="2.286532967s" podCreationTimestamp="2026-01-31 06:03:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:03:59.279211006 +0000 UTC m=+1505.373092857" watchObservedRunningTime="2026-01-31 06:03:59.286532967 +0000 UTC m=+1505.380414808" Jan 31 06:04:00 crc kubenswrapper[4712]: I0131 06:04:00.270280 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1","Type":"ContainerStarted","Data":"14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1"} Jan 31 06:04:00 crc kubenswrapper[4712]: I0131 06:04:00.292888 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.292864627 podStartE2EDuration="2.292864627s" 
podCreationTimestamp="2026-01-31 06:03:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:04:00.288034837 +0000 UTC m=+1506.381916678" watchObservedRunningTime="2026-01-31 06:04:00.292864627 +0000 UTC m=+1506.386746468" Jan 31 06:04:01 crc kubenswrapper[4712]: I0131 06:04:01.534826 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 31 06:04:01 crc kubenswrapper[4712]: I0131 06:04:01.535268 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 31 06:04:02 crc kubenswrapper[4712]: I0131 06:04:02.141752 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 31 06:04:02 crc kubenswrapper[4712]: I0131 06:04:02.547405 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 31 06:04:02 crc kubenswrapper[4712]: I0131 06:04:02.547510 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 31 06:04:03 crc kubenswrapper[4712]: I0131 06:04:03.636510 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 31 06:04:04 crc kubenswrapper[4712]: I0131 06:04:04.583091 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 31 06:04:06 crc kubenswrapper[4712]: I0131 06:04:06.942853 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 31 06:04:06 crc kubenswrapper[4712]: I0131 06:04:06.943521 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="72eaf48a-98d5-44bb-abec-a88630d51ae7" containerName="kube-state-metrics" containerID="cri-o://506652cfe05af02cd801d6b0c1b91751bb6d62ea865452355ba91f1abcb256cf" gracePeriod=30 Jan 31 06:04:07 crc kubenswrapper[4712]: I0131 06:04:07.362947 4712 generic.go:334] "Generic (PLEG): container finished" podID="72eaf48a-98d5-44bb-abec-a88630d51ae7" containerID="506652cfe05af02cd801d6b0c1b91751bb6d62ea865452355ba91f1abcb256cf" exitCode=2 Jan 31 06:04:07 crc kubenswrapper[4712]: I0131 06:04:07.363089 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"72eaf48a-98d5-44bb-abec-a88630d51ae7","Type":"ContainerDied","Data":"506652cfe05af02cd801d6b0c1b91751bb6d62ea865452355ba91f1abcb256cf"} Jan 31 06:04:07 crc kubenswrapper[4712]: I0131 06:04:07.363412 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"72eaf48a-98d5-44bb-abec-a88630d51ae7","Type":"ContainerDied","Data":"72e6ba18426843f8ba66619394823844967b404a8baca8f951a69dd8b6e383bd"} Jan 31 06:04:07 crc kubenswrapper[4712]: I0131 06:04:07.363439 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72e6ba18426843f8ba66619394823844967b404a8baca8f951a69dd8b6e383bd" Jan 31 06:04:07 crc 
kubenswrapper[4712]: I0131 06:04:07.454652 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 31 06:04:07 crc kubenswrapper[4712]: I0131 06:04:07.647655 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 31 06:04:07 crc kubenswrapper[4712]: I0131 06:04:07.647709 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 31 06:04:07 crc kubenswrapper[4712]: I0131 06:04:07.660358 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmngt\" (UniqueName: \"kubernetes.io/projected/72eaf48a-98d5-44bb-abec-a88630d51ae7-kube-api-access-xmngt\") pod \"72eaf48a-98d5-44bb-abec-a88630d51ae7\" (UID: \"72eaf48a-98d5-44bb-abec-a88630d51ae7\") " Jan 31 06:04:07 crc kubenswrapper[4712]: I0131 06:04:07.669415 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72eaf48a-98d5-44bb-abec-a88630d51ae7-kube-api-access-xmngt" (OuterVolumeSpecName: "kube-api-access-xmngt") pod "72eaf48a-98d5-44bb-abec-a88630d51ae7" (UID: "72eaf48a-98d5-44bb-abec-a88630d51ae7"). InnerVolumeSpecName "kube-api-access-xmngt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:07 crc kubenswrapper[4712]: I0131 06:04:07.762406 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmngt\" (UniqueName: \"kubernetes.io/projected/72eaf48a-98d5-44bb-abec-a88630d51ae7-kube-api-access-xmngt\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.373287 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.410216 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.426900 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.439820 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 31 06:04:08 crc kubenswrapper[4712]: E0131 06:04:08.440348 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72eaf48a-98d5-44bb-abec-a88630d51ae7" containerName="kube-state-metrics" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.440368 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="72eaf48a-98d5-44bb-abec-a88630d51ae7" containerName="kube-state-metrics" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.440584 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="72eaf48a-98d5-44bb-abec-a88630d51ae7" containerName="kube-state-metrics" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.441338 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.445559 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.451372 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.454586 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.517772 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72eaf48a-98d5-44bb-abec-a88630d51ae7" path="/var/lib/kubelet/pods/72eaf48a-98d5-44bb-abec-a88630d51ae7/volumes" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.577765 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6xmg\" (UniqueName: \"kubernetes.io/projected/2f56d276-e653-4e6b-b5a7-e530babf7175-kube-api-access-s6xmg\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.577935 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/2f56d276-e653-4e6b-b5a7-e530babf7175-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.577986 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f56d276-e653-4e6b-b5a7-e530babf7175-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.578228 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f56d276-e653-4e6b-b5a7-e530babf7175-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.636382 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.668123 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.681601 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f56d276-e653-4e6b-b5a7-e530babf7175-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.682585 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6xmg\" (UniqueName: \"kubernetes.io/projected/2f56d276-e653-4e6b-b5a7-e530babf7175-kube-api-access-s6xmg\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " 
pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.682767 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/2f56d276-e653-4e6b-b5a7-e530babf7175-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.682799 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f56d276-e653-4e6b-b5a7-e530babf7175-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.690217 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f56d276-e653-4e6b-b5a7-e530babf7175-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.691530 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f56d276-e653-4e6b-b5a7-e530babf7175-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.702071 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/2f56d276-e653-4e6b-b5a7-e530babf7175-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.705898 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6xmg\" (UniqueName: \"kubernetes.io/projected/2f56d276-e653-4e6b-b5a7-e530babf7175-kube-api-access-s6xmg\") pod \"kube-state-metrics-0\" (UID: \"2f56d276-e653-4e6b-b5a7-e530babf7175\") " pod="openstack/kube-state-metrics-0" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.731405 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.731416 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 31 06:04:08 crc kubenswrapper[4712]: I0131 06:04:08.774969 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 31 06:04:09 crc kubenswrapper[4712]: I0131 06:04:09.255045 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 31 06:04:09 crc kubenswrapper[4712]: I0131 06:04:09.390142 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2f56d276-e653-4e6b-b5a7-e530babf7175","Type":"ContainerStarted","Data":"53f428f30b0d1fe71e6ec69a6e915c1d7e23b72a3a0ca7365d0e437408cad450"} Jan 31 06:04:09 crc kubenswrapper[4712]: I0131 06:04:09.420668 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 31 06:04:09 crc kubenswrapper[4712]: I0131 06:04:09.490914 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:09 crc kubenswrapper[4712]: I0131 06:04:09.491446 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="proxy-httpd" containerID="cri-o://f50126ac4addac8cf370aa7bc075f9c6b40b23f4d00cffeef7348a08b93fead2" gracePeriod=30 Jan 31 06:04:09 crc kubenswrapper[4712]: I0131 06:04:09.491452 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="sg-core" containerID="cri-o://400243b0e49f775921d6d1f11e0554208eb7dd21ad91ff199f55c1dcef097cfa" gracePeriod=30 Jan 31 06:04:09 crc kubenswrapper[4712]: I0131 06:04:09.491476 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="ceilometer-notification-agent" containerID="cri-o://ffe351fbbd6fc1b2d62280df4bfcf3b10438ce8bb90c044786c2c9e272ca1b2f" gracePeriod=30 Jan 31 06:04:09 crc kubenswrapper[4712]: I0131 06:04:09.491758 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="ceilometer-central-agent" containerID="cri-o://16529ce4a3397587f366d6846b2ffc01977be77a0b2a36dbf6e6dd6daf3425ea" gracePeriod=30 Jan 31 06:04:10 crc kubenswrapper[4712]: I0131 06:04:10.421867 4712 generic.go:334] "Generic (PLEG): container finished" podID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerID="f50126ac4addac8cf370aa7bc075f9c6b40b23f4d00cffeef7348a08b93fead2" exitCode=0 Jan 31 06:04:10 crc kubenswrapper[4712]: I0131 06:04:10.423965 4712 generic.go:334] "Generic (PLEG): container finished" podID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerID="400243b0e49f775921d6d1f11e0554208eb7dd21ad91ff199f55c1dcef097cfa" exitCode=2 Jan 31 06:04:10 crc kubenswrapper[4712]: I0131 06:04:10.424313 4712 generic.go:334] "Generic (PLEG): container finished" podID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerID="16529ce4a3397587f366d6846b2ffc01977be77a0b2a36dbf6e6dd6daf3425ea" exitCode=0 Jan 31 06:04:10 crc kubenswrapper[4712]: I0131 06:04:10.423260 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerDied","Data":"f50126ac4addac8cf370aa7bc075f9c6b40b23f4d00cffeef7348a08b93fead2"} Jan 31 06:04:10 crc kubenswrapper[4712]: I0131 06:04:10.424685 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerDied","Data":"400243b0e49f775921d6d1f11e0554208eb7dd21ad91ff199f55c1dcef097cfa"} Jan 31 06:04:10 crc kubenswrapper[4712]: I0131 06:04:10.424777 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerDied","Data":"16529ce4a3397587f366d6846b2ffc01977be77a0b2a36dbf6e6dd6daf3425ea"} Jan 31 06:04:10 crc kubenswrapper[4712]: I0131 06:04:10.437470 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2f56d276-e653-4e6b-b5a7-e530babf7175","Type":"ContainerStarted","Data":"150609ec0248ccacf01ed8952cc64e81ed0484e50c6bbd744715dd6f29ab354a"} Jan 31 06:04:10 crc kubenswrapper[4712]: I0131 06:04:10.460689 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.978444869 podStartE2EDuration="2.460563062s" podCreationTimestamp="2026-01-31 06:04:08 +0000 UTC" firstStartedPulling="2026-01-31 06:04:09.261265242 +0000 UTC m=+1515.355147083" lastFinishedPulling="2026-01-31 06:04:09.743383435 +0000 UTC m=+1515.837265276" observedRunningTime="2026-01-31 06:04:10.460503201 +0000 UTC m=+1516.554385042" watchObservedRunningTime="2026-01-31 06:04:10.460563062 +0000 UTC m=+1516.554444903" Jan 31 06:04:11 crc kubenswrapper[4712]: I0131 06:04:11.447469 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 31 06:04:11 crc kubenswrapper[4712]: I0131 06:04:11.543490 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 31 06:04:11 crc kubenswrapper[4712]: I0131 06:04:11.546737 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 31 06:04:11 crc kubenswrapper[4712]: I0131 06:04:11.551514 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 31 06:04:12 crc kubenswrapper[4712]: I0131 06:04:12.459808 4712 generic.go:334] "Generic (PLEG): container finished" podID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerID="ffe351fbbd6fc1b2d62280df4bfcf3b10438ce8bb90c044786c2c9e272ca1b2f" exitCode=0 Jan 31 06:04:12 crc kubenswrapper[4712]: I0131 06:04:12.459889 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerDied","Data":"ffe351fbbd6fc1b2d62280df4bfcf3b10438ce8bb90c044786c2c9e272ca1b2f"} Jan 31 06:04:12 crc kubenswrapper[4712]: I0131 06:04:12.465262 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 31 06:04:12 crc kubenswrapper[4712]: I0131 06:04:12.497945 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:04:12 crc kubenswrapper[4712]: I0131 06:04:12.498042 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:04:12 crc kubenswrapper[4712]: 
I0131 06:04:12.498112 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 06:04:12 crc kubenswrapper[4712]: I0131 06:04:12.499077 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"73eb5805d5b1bc38b6b568e991a2ca4d8d641189ec28507e068fbd8ff0272f37"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 06:04:12 crc kubenswrapper[4712]: I0131 06:04:12.499158 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://73eb5805d5b1bc38b6b568e991a2ca4d8d641189ec28507e068fbd8ff0272f37" gracePeriod=600 Jan 31 06:04:12 crc kubenswrapper[4712]: I0131 06:04:12.878024 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.078862 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-scripts\") pod \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.079325 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-log-httpd\") pod \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.079412 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-run-httpd\") pod \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.079450 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-combined-ca-bundle\") pod \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.079546 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqc86\" (UniqueName: \"kubernetes.io/projected/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-kube-api-access-hqc86\") pod \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.079626 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-config-data\") pod \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.079718 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-sg-core-conf-yaml\") pod 
\"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\" (UID: \"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5\") " Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.087030 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-kube-api-access-hqc86" (OuterVolumeSpecName: "kube-api-access-hqc86") pod "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" (UID: "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5"). InnerVolumeSpecName "kube-api-access-hqc86". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.087209 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-scripts" (OuterVolumeSpecName: "scripts") pod "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" (UID: "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.111288 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" (UID: "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.117953 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" (UID: "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.132868 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" (UID: "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.179903 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" (UID: "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.182816 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqc86\" (UniqueName: \"kubernetes.io/projected/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-kube-api-access-hqc86\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.182852 4712 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.182910 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.182925 4712 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.182938 4712 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.182973 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.186333 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-config-data" (OuterVolumeSpecName: "config-data") pod "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" (UID: "c0eaa8b0-a7b5-41af-ba8d-dc01413185d5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.283726 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.472435 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="73eb5805d5b1bc38b6b568e991a2ca4d8d641189ec28507e068fbd8ff0272f37" exitCode=0 Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.472508 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"73eb5805d5b1bc38b6b568e991a2ca4d8d641189ec28507e068fbd8ff0272f37"} Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.472541 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453"} Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.472558 4712 scope.go:117] "RemoveContainer" containerID="0d66f63e01719bf8f3e02142623cdb63b4fc7cc229aaa57643fb27385d070e76" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.477497 4712 generic.go:334] "Generic (PLEG): container finished" podID="57f643a8-d361-48e8-a982-df43b7a01b65" containerID="5e85318643e4f4c8317b2eb4076750900e2077075052a51df54abaf15003905a" exitCode=137 Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.477575 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"57f643a8-d361-48e8-a982-df43b7a01b65","Type":"ContainerDied","Data":"5e85318643e4f4c8317b2eb4076750900e2077075052a51df54abaf15003905a"} Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.481801 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0eaa8b0-a7b5-41af-ba8d-dc01413185d5","Type":"ContainerDied","Data":"b7947cf307e84e416ea82289c4e9e9eb207e196b5f0895376def5bbabdaa53fc"} Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.481839 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.541701 4712 scope.go:117] "RemoveContainer" containerID="f50126ac4addac8cf370aa7bc075f9c6b40b23f4d00cffeef7348a08b93fead2" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.569513 4712 scope.go:117] "RemoveContainer" containerID="400243b0e49f775921d6d1f11e0554208eb7dd21ad91ff199f55c1dcef097cfa" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.574015 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.614286 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.624461 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:13 crc kubenswrapper[4712]: E0131 06:04:13.625108 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="proxy-httpd" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.625133 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="proxy-httpd" Jan 31 06:04:13 crc kubenswrapper[4712]: E0131 06:04:13.625161 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="ceilometer-notification-agent" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.625188 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="ceilometer-notification-agent" Jan 31 06:04:13 crc kubenswrapper[4712]: E0131 06:04:13.625211 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="sg-core" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.625220 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="sg-core" Jan 31 06:04:13 crc kubenswrapper[4712]: E0131 06:04:13.625253 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="ceilometer-central-agent" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.625262 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="ceilometer-central-agent" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.625505 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="ceilometer-central-agent" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.625532 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="sg-core" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.625547 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="ceilometer-notification-agent" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.625571 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" containerName="proxy-httpd" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.627417 4712 scope.go:117] "RemoveContainer" containerID="ffe351fbbd6fc1b2d62280df4bfcf3b10438ce8bb90c044786c2c9e272ca1b2f" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.628147 4712 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.630751 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.630928 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.631106 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.640635 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.677487 4712 scope.go:117] "RemoveContainer" containerID="16529ce4a3397587f366d6846b2ffc01977be77a0b2a36dbf6e6dd6daf3425ea" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.797957 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-config-data\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.798000 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-run-httpd\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.798069 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.798093 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-log-httpd\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.798232 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n92l\" (UniqueName: \"kubernetes.io/projected/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-kube-api-access-8n92l\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.798342 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.798383 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 
06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.798400 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-scripts\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.900630 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n92l\" (UniqueName: \"kubernetes.io/projected/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-kube-api-access-8n92l\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.900775 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.900813 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.900837 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-scripts\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.900887 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-config-data\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.900907 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-run-httpd\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.900970 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.901787 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-run-httpd\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.901810 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-log-httpd\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc 
kubenswrapper[4712]: I0131 06:04:13.902075 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-log-httpd\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.909010 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.925959 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.926711 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.926951 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-config-data\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.927142 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-scripts\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.939913 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n92l\" (UniqueName: \"kubernetes.io/projected/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-kube-api-access-8n92l\") pod \"ceilometer-0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.976265 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:04:13 crc kubenswrapper[4712]: I0131 06:04:13.984425 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.110758 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-combined-ca-bundle\") pod \"57f643a8-d361-48e8-a982-df43b7a01b65\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.111429 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zt986\" (UniqueName: \"kubernetes.io/projected/57f643a8-d361-48e8-a982-df43b7a01b65-kube-api-access-zt986\") pod \"57f643a8-d361-48e8-a982-df43b7a01b65\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.111614 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-config-data\") pod \"57f643a8-d361-48e8-a982-df43b7a01b65\" (UID: \"57f643a8-d361-48e8-a982-df43b7a01b65\") " Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.117004 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57f643a8-d361-48e8-a982-df43b7a01b65-kube-api-access-zt986" (OuterVolumeSpecName: "kube-api-access-zt986") pod "57f643a8-d361-48e8-a982-df43b7a01b65" (UID: "57f643a8-d361-48e8-a982-df43b7a01b65"). InnerVolumeSpecName "kube-api-access-zt986". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.150320 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57f643a8-d361-48e8-a982-df43b7a01b65" (UID: "57f643a8-d361-48e8-a982-df43b7a01b65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.153340 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-config-data" (OuterVolumeSpecName: "config-data") pod "57f643a8-d361-48e8-a982-df43b7a01b65" (UID: "57f643a8-d361-48e8-a982-df43b7a01b65"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.214380 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.214420 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57f643a8-d361-48e8-a982-df43b7a01b65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.214443 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zt986\" (UniqueName: \"kubernetes.io/projected/57f643a8-d361-48e8-a982-df43b7a01b65-kube-api-access-zt986\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.495759 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:14 crc kubenswrapper[4712]: W0131 06:04:14.518260 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cd3c310_b8d2_44e9_86d8_f85c9c2c13d0.slice/crio-084cee88f5d15328eb3bc8edb5393ef7dd71c1bb83a88866e17eda8b1a7cbe4a WatchSource:0}: Error finding container 084cee88f5d15328eb3bc8edb5393ef7dd71c1bb83a88866e17eda8b1a7cbe4a: Status 404 returned error can't find the container with id 084cee88f5d15328eb3bc8edb5393ef7dd71c1bb83a88866e17eda8b1a7cbe4a Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.526992 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.532616 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0eaa8b0-a7b5-41af-ba8d-dc01413185d5" path="/var/lib/kubelet/pods/c0eaa8b0-a7b5-41af-ba8d-dc01413185d5/volumes" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.533804 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"57f643a8-d361-48e8-a982-df43b7a01b65","Type":"ContainerDied","Data":"9f8072fdda30904a6b0a960337823ebbd8801dd55d749aba0786d1a4a21354a2"} Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.533852 4712 scope.go:117] "RemoveContainer" containerID="5e85318643e4f4c8317b2eb4076750900e2077075052a51df54abaf15003905a" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.585041 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.607487 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.618090 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 31 06:04:14 crc kubenswrapper[4712]: E0131 06:04:14.618620 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f643a8-d361-48e8-a982-df43b7a01b65" containerName="nova-cell1-novncproxy-novncproxy" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.618641 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f643a8-d361-48e8-a982-df43b7a01b65" containerName="nova-cell1-novncproxy-novncproxy" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.618876 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="57f643a8-d361-48e8-a982-df43b7a01b65" 
containerName="nova-cell1-novncproxy-novncproxy" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.619839 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.625000 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.625266 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.625477 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.627833 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.727844 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.728253 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.728383 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.728411 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pml9f\" (UniqueName: \"kubernetes.io/projected/ab75dbe3-c922-4440-b310-6fe0d2201274-kube-api-access-pml9f\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.728461 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.830383 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.830451 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pml9f\" (UniqueName: 
\"kubernetes.io/projected/ab75dbe3-c922-4440-b310-6fe0d2201274-kube-api-access-pml9f\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.830541 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.830618 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.830653 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.837276 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.837293 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.839656 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.842772 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab75dbe3-c922-4440-b310-6fe0d2201274-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.851528 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pml9f\" (UniqueName: \"kubernetes.io/projected/ab75dbe3-c922-4440-b310-6fe0d2201274-kube-api-access-pml9f\") pod \"nova-cell1-novncproxy-0\" (UID: \"ab75dbe3-c922-4440-b310-6fe0d2201274\") " pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:14 crc kubenswrapper[4712]: I0131 06:04:14.982432 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:15 crc kubenswrapper[4712]: I0131 06:04:15.445429 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 31 06:04:15 crc kubenswrapper[4712]: I0131 06:04:15.569988 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ab75dbe3-c922-4440-b310-6fe0d2201274","Type":"ContainerStarted","Data":"131a12132a6ee020c5fef2b78f3470a7a20b9d3d4bfe51738382c4a90817e2de"} Jan 31 06:04:15 crc kubenswrapper[4712]: I0131 06:04:15.573195 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerStarted","Data":"b2b6b6e6dae9956d28fdea2e41f6e778b636a4d71372365030b195ab5841e97b"} Jan 31 06:04:15 crc kubenswrapper[4712]: I0131 06:04:15.573231 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerStarted","Data":"1b5ebd4101d9562bfaa657983d468321c203a595ed324105802562ad9e7eb364"} Jan 31 06:04:15 crc kubenswrapper[4712]: I0131 06:04:15.573243 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerStarted","Data":"084cee88f5d15328eb3bc8edb5393ef7dd71c1bb83a88866e17eda8b1a7cbe4a"} Jan 31 06:04:16 crc kubenswrapper[4712]: I0131 06:04:16.519511 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57f643a8-d361-48e8-a982-df43b7a01b65" path="/var/lib/kubelet/pods/57f643a8-d361-48e8-a982-df43b7a01b65/volumes" Jan 31 06:04:16 crc kubenswrapper[4712]: I0131 06:04:16.585924 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ab75dbe3-c922-4440-b310-6fe0d2201274","Type":"ContainerStarted","Data":"c94fc8084c9b3bbb65f94f5c91a707879347a0245af2d982475c9c3659c1d11d"} Jan 31 06:04:16 crc kubenswrapper[4712]: I0131 06:04:16.589873 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerStarted","Data":"f841a7c6623e2053a76af860d7f20e67169b8ff0119a9e7dbe9d83eea843a620"} Jan 31 06:04:16 crc kubenswrapper[4712]: I0131 06:04:16.618879 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.618845561 podStartE2EDuration="2.618845561s" podCreationTimestamp="2026-01-31 06:04:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:04:16.608000842 +0000 UTC m=+1522.701882683" watchObservedRunningTime="2026-01-31 06:04:16.618845561 +0000 UTC m=+1522.712727402" Jan 31 06:04:17 crc kubenswrapper[4712]: I0131 06:04:17.656013 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 31 06:04:17 crc kubenswrapper[4712]: I0131 06:04:17.656530 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 31 06:04:17 crc kubenswrapper[4712]: I0131 06:04:17.658622 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 31 06:04:17 crc kubenswrapper[4712]: I0131 06:04:17.668002 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 
06:04:18.610279 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.625883 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.816068 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b9b6b4b89-4tz4m"] Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.818153 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.827376 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-nb\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.827455 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-sb\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.827481 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbfc4\" (UniqueName: \"kubernetes.io/projected/51718f5c-e32d-438f-acba-06d5a797b316-kube-api-access-jbfc4\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.827552 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-svc\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.827623 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-config\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.827665 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-swift-storage-0\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.831544 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.837294 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b9b6b4b89-4tz4m"] Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.929666 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-swift-storage-0\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.929720 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-nb\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.929760 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-sb\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.929780 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbfc4\" (UniqueName: \"kubernetes.io/projected/51718f5c-e32d-438f-acba-06d5a797b316-kube-api-access-jbfc4\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.929842 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-svc\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.929913 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-config\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.930814 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-config\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.931258 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-swift-storage-0\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.931691 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-svc\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.931888 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-nb\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.931908 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-sb\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:18 crc kubenswrapper[4712]: I0131 06:04:18.959984 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbfc4\" (UniqueName: \"kubernetes.io/projected/51718f5c-e32d-438f-acba-06d5a797b316-kube-api-access-jbfc4\") pod \"dnsmasq-dns-5b9b6b4b89-4tz4m\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:19 crc kubenswrapper[4712]: I0131 06:04:19.150118 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:19 crc kubenswrapper[4712]: I0131 06:04:19.620324 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerStarted","Data":"e5c63bbd12354cebd4e3fc7c58a7cda9df4173fa086de9425ec889d4e7b52cad"} Jan 31 06:04:19 crc kubenswrapper[4712]: I0131 06:04:19.665795 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.829593889 podStartE2EDuration="6.665766263s" podCreationTimestamp="2026-01-31 06:04:13 +0000 UTC" firstStartedPulling="2026-01-31 06:04:14.547159589 +0000 UTC m=+1520.641041430" lastFinishedPulling="2026-01-31 06:04:18.383331923 +0000 UTC m=+1524.477213804" observedRunningTime="2026-01-31 06:04:19.652212656 +0000 UTC m=+1525.746094507" watchObservedRunningTime="2026-01-31 06:04:19.665766263 +0000 UTC m=+1525.759648104" Jan 31 06:04:19 crc kubenswrapper[4712]: I0131 06:04:19.753240 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b9b6b4b89-4tz4m"] Jan 31 06:04:19 crc kubenswrapper[4712]: I0131 06:04:19.982726 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:20 crc kubenswrapper[4712]: I0131 06:04:20.630540 4712 generic.go:334] "Generic (PLEG): container finished" podID="51718f5c-e32d-438f-acba-06d5a797b316" containerID="75936447e03c20aa958d4ccd66376c411ed81cf2f1f315a1718dce6e3f68f815" exitCode=0 Jan 31 06:04:20 crc kubenswrapper[4712]: I0131 06:04:20.630606 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" event={"ID":"51718f5c-e32d-438f-acba-06d5a797b316","Type":"ContainerDied","Data":"75936447e03c20aa958d4ccd66376c411ed81cf2f1f315a1718dce6e3f68f815"} Jan 31 06:04:20 crc kubenswrapper[4712]: I0131 06:04:20.631038 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" event={"ID":"51718f5c-e32d-438f-acba-06d5a797b316","Type":"ContainerStarted","Data":"017c7bea2ad072f7d7ee8e07b07f384cf1e9a731c83ab19c1c526f62c62422e7"} Jan 31 06:04:20 crc kubenswrapper[4712]: I0131 06:04:20.631471 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 31 06:04:21 crc kubenswrapper[4712]: I0131 06:04:21.097782 4712 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:21 crc kubenswrapper[4712]: I0131 06:04:21.641942 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" event={"ID":"51718f5c-e32d-438f-acba-06d5a797b316","Type":"ContainerStarted","Data":"ee32e834fe986a5f8e2ce9c1b9142d868ea133db1b4431eef9092ca4a9998cf3"} Jan 31 06:04:21 crc kubenswrapper[4712]: I0131 06:04:21.642113 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-log" containerID="cri-o://796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda" gracePeriod=30 Jan 31 06:04:21 crc kubenswrapper[4712]: I0131 06:04:21.642163 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-api" containerID="cri-o://aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006" gracePeriod=30 Jan 31 06:04:21 crc kubenswrapper[4712]: I0131 06:04:21.643734 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:21 crc kubenswrapper[4712]: I0131 06:04:21.679397 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" podStartSLOduration=3.679366375 podStartE2EDuration="3.679366375s" podCreationTimestamp="2026-01-31 06:04:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:04:21.667309507 +0000 UTC m=+1527.761191348" watchObservedRunningTime="2026-01-31 06:04:21.679366375 +0000 UTC m=+1527.773248216" Jan 31 06:04:22 crc kubenswrapper[4712]: I0131 06:04:22.654558 4712 generic.go:334] "Generic (PLEG): container finished" podID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerID="796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda" exitCode=143 Jan 31 06:04:22 crc kubenswrapper[4712]: I0131 06:04:22.654645 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308","Type":"ContainerDied","Data":"796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda"} Jan 31 06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.252435 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.253246 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="ceilometer-central-agent" containerID="cri-o://1b5ebd4101d9562bfaa657983d468321c203a595ed324105802562ad9e7eb364" gracePeriod=30 Jan 31 06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.253326 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="proxy-httpd" containerID="cri-o://e5c63bbd12354cebd4e3fc7c58a7cda9df4173fa086de9425ec889d4e7b52cad" gracePeriod=30 Jan 31 06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.253367 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="sg-core" containerID="cri-o://f841a7c6623e2053a76af860d7f20e67169b8ff0119a9e7dbe9d83eea843a620" gracePeriod=30 Jan 31 
06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.253418 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="ceilometer-notification-agent" containerID="cri-o://b2b6b6e6dae9956d28fdea2e41f6e778b636a4d71372365030b195ab5841e97b" gracePeriod=30 Jan 31 06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.679908 4712 generic.go:334] "Generic (PLEG): container finished" podID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerID="e5c63bbd12354cebd4e3fc7c58a7cda9df4173fa086de9425ec889d4e7b52cad" exitCode=0 Jan 31 06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.679961 4712 generic.go:334] "Generic (PLEG): container finished" podID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerID="f841a7c6623e2053a76af860d7f20e67169b8ff0119a9e7dbe9d83eea843a620" exitCode=2 Jan 31 06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.679993 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerDied","Data":"e5c63bbd12354cebd4e3fc7c58a7cda9df4173fa086de9425ec889d4e7b52cad"} Jan 31 06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.680052 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerDied","Data":"f841a7c6623e2053a76af860d7f20e67169b8ff0119a9e7dbe9d83eea843a620"} Jan 31 06:04:24 crc kubenswrapper[4712]: I0131 06:04:24.986378 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.006114 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.197365 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.287007 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-combined-ca-bundle\") pod \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.287067 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-logs\") pod \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.287136 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-config-data\") pod \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.287164 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzhrg\" (UniqueName: \"kubernetes.io/projected/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-kube-api-access-kzhrg\") pod \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\" (UID: \"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308\") " Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.288659 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-logs" (OuterVolumeSpecName: "logs") pod "1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" (UID: "1c5b46f9-fdd1-4ee5-a629-f4b388b5c308"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.297856 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-kube-api-access-kzhrg" (OuterVolumeSpecName: "kube-api-access-kzhrg") pod "1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" (UID: "1c5b46f9-fdd1-4ee5-a629-f4b388b5c308"). InnerVolumeSpecName "kube-api-access-kzhrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.330915 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-config-data" (OuterVolumeSpecName: "config-data") pod "1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" (UID: "1c5b46f9-fdd1-4ee5-a629-f4b388b5c308"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.340191 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" (UID: "1c5b46f9-fdd1-4ee5-a629-f4b388b5c308"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.389458 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.389485 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-logs\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.389495 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.389506 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzhrg\" (UniqueName: \"kubernetes.io/projected/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308-kube-api-access-kzhrg\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.693834 4712 generic.go:334] "Generic (PLEG): container finished" podID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerID="b2b6b6e6dae9956d28fdea2e41f6e778b636a4d71372365030b195ab5841e97b" exitCode=0 Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.693933 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerDied","Data":"b2b6b6e6dae9956d28fdea2e41f6e778b636a4d71372365030b195ab5841e97b"} Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.698030 4712 generic.go:334] "Generic (PLEG): container finished" podID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerID="aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006" exitCode=0 Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.698076 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308","Type":"ContainerDied","Data":"aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006"} Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.698101 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.698119 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1c5b46f9-fdd1-4ee5-a629-f4b388b5c308","Type":"ContainerDied","Data":"185c095f20f17f6f440dfb9bb2b94c5337a457952de2563cfc5f60fe3df27e69"} Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.698145 4712 scope.go:117] "RemoveContainer" containerID="aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.722558 4712 scope.go:117] "RemoveContainer" containerID="796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.737722 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.755428 4712 scope.go:117] "RemoveContainer" containerID="aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.758533 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:25 crc kubenswrapper[4712]: E0131 06:04:25.759469 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006\": container with ID starting with aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006 not found: ID does not exist" containerID="aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.759544 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006"} err="failed to get container status \"aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006\": rpc error: code = NotFound desc = could not find container \"aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006\": container with ID starting with aec847a7a9fc277e010e81e5d65418f02985f40862650ff113ed9adb9f1df006 not found: ID does not exist" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.759594 4712 scope.go:117] "RemoveContainer" containerID="796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda" Jan 31 06:04:25 crc kubenswrapper[4712]: E0131 06:04:25.771364 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda\": container with ID starting with 796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda not found: ID does not exist" containerID="796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.771429 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda"} err="failed to get container status \"796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda\": rpc error: code = NotFound desc = could not find container \"796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda\": container with ID starting with 796fc3456e58515613df84c6b84b48efe165677c9e629dc00c14a121b5909fda not found: ID does not exist" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 
06:04:25.775622 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.786722 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:25 crc kubenswrapper[4712]: E0131 06:04:25.787239 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-log" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.787259 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-log" Jan 31 06:04:25 crc kubenswrapper[4712]: E0131 06:04:25.787276 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-api" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.787282 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-api" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.787511 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-api" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.787539 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" containerName="nova-api-log" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.788653 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.799789 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.799930 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.799996 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.810001 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.903796 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.903851 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d6f5589-654a-419d-82be-e8ffd94495ca-logs\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.903892 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-public-tls-certs\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.903909 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
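The NotFound errors above are benign: by the time the second RemoveContainer pass ran, the runtime had already deleted the container, so the deletor logs the error and moves on. A compilable sketch of that idempotent-delete pattern; the error value and remove function here are stand-ins for illustration, not the real CRI client:

    package main

    import (
        "errors"
        "fmt"
    )

    // errNotFound stands in for the runtime's NotFound error, which surfaces in
    // the log above as "rpc error: code = NotFound ... ID does not exist".
    var errNotFound = errors.New("container not found")

    // removeContainer treats "already gone" as success, which is why the retries
    // above end in a logged error but not in a failed pod cleanup.
    func removeContainer(remove func(id string) error, id string) error {
        if err := remove(id); err != nil && !errors.Is(err, errNotFound) {
            return err
        }
        return nil
    }

    func main() {
        gone := func(id string) error { return errNotFound }
        fmt.Println(removeContainer(gone, "aec847a7a9fc")) // prints <nil>: deletion is idempotent
    }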
\"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-config-data\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.903950 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:25 crc kubenswrapper[4712]: I0131 06:04:25.904286 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqllz\" (UniqueName: \"kubernetes.io/projected/2d6f5589-654a-419d-82be-e8ffd94495ca-kube-api-access-nqllz\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.005826 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.005981 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d6f5589-654a-419d-82be-e8ffd94495ca-logs\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.006083 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-public-tls-certs\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.006111 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-config-data\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.006188 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.006244 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqllz\" (UniqueName: \"kubernetes.io/projected/2d6f5589-654a-419d-82be-e8ffd94495ca-kube-api-access-nqllz\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.006377 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d6f5589-654a-419d-82be-e8ffd94495ca-logs\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.009886 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-r72q5"] Jan 31 06:04:26 crc 
kubenswrapper[4712]: I0131 06:04:26.011513 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.014621 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.014853 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-public-tls-certs\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.014993 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.015023 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.022785 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-config-data\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.023665 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.025253 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-r72q5"] Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.044732 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqllz\" (UniqueName: \"kubernetes.io/projected/2d6f5589-654a-419d-82be-e8ffd94495ca-kube-api-access-nqllz\") pod \"nova-api-0\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.124303 4712 util.go:30] "No sandbox for pod can be found. 
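For the replacement nova-api-0 pod the reconciler walks each volume through VerifyControllerAttachedVolume, then "MountVolume started", then "MountVolume.SetUp succeeded". A small sketch that scans a log like this for volumes whose mount started but never reported SetUp success; the regexps are illustrative assumptions matched to the escaped-quote rendering above:

    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    var (
        started   = regexp.MustCompile(`operationExecutor\.MountVolume started for volume \\"([^\\"]+)\\".*pod="([^"]+)"`)
        succeeded = regexp.MustCompile(`MountVolume\.SetUp succeeded for volume \\"([^\\"]+)\\".*pod="([^"]+)"`)
    )

    func main() {
        pending := map[string]bool{} // "pod/volume" -> mount started, SetUp not yet seen
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 1024*1024), 1024*1024)
        for sc.Scan() {
            line := sc.Text()
            if m := started.FindStringSubmatch(line); m != nil {
                pending[m[2]+"/"+m[1]] = true
            }
            if m := succeeded.FindStringSubmatch(line); m != nil {
                delete(pending, m[2]+"/"+m[1])
            }
        }
        for k := range pending {
            fmt.Println("mount never completed:", k)
        }
    }

In this section every mount completes (all six nova-api-0 volumes reach SetUp within ~40ms), so the sketch would print nothing.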
Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.210858 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.210933 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lpwq\" (UniqueName: \"kubernetes.io/projected/b012712b-f57a-4334-aa1c-0264aedf8e5c-kube-api-access-8lpwq\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.211009 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-scripts\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.211155 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-config-data\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.314602 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.314666 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lpwq\" (UniqueName: \"kubernetes.io/projected/b012712b-f57a-4334-aa1c-0264aedf8e5c-kube-api-access-8lpwq\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.314711 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-scripts\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.314825 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-config-data\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.321830 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-scripts\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: 
\"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.322720 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-config-data\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.325687 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.333783 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lpwq\" (UniqueName: \"kubernetes.io/projected/b012712b-f57a-4334-aa1c-0264aedf8e5c-kube-api-access-8lpwq\") pod \"nova-cell1-cell-mapping-r72q5\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.427392 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.529988 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c5b46f9-fdd1-4ee5-a629-f4b388b5c308" path="/var/lib/kubelet/pods/1c5b46f9-fdd1-4ee5-a629-f4b388b5c308/volumes" Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.660520 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.741043 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2d6f5589-654a-419d-82be-e8ffd94495ca","Type":"ContainerStarted","Data":"18a4bb921e8e437c113a0df9e83d67ce916a3c092fb8f0b96a8f506e8614b7aa"} Jan 31 06:04:26 crc kubenswrapper[4712]: I0131 06:04:26.880892 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-r72q5"] Jan 31 06:04:26 crc kubenswrapper[4712]: W0131 06:04:26.887055 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb012712b_f57a_4334_aa1c_0264aedf8e5c.slice/crio-d978fd2ab99c36ea09cd177e8b10cb555231d73297c0ce4ca87641064053580d WatchSource:0}: Error finding container d978fd2ab99c36ea09cd177e8b10cb555231d73297c0ce4ca87641064053580d: Status 404 returned error can't find the container with id d978fd2ab99c36ea09cd177e8b10cb555231d73297c0ce4ca87641064053580d Jan 31 06:04:27 crc kubenswrapper[4712]: I0131 06:04:27.769686 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-r72q5" event={"ID":"b012712b-f57a-4334-aa1c-0264aedf8e5c","Type":"ContainerStarted","Data":"b1c4ac3096018c230454730faf2842e755cacfcc217cb8c81c81b581e7723af5"} Jan 31 06:04:27 crc kubenswrapper[4712]: I0131 06:04:27.770102 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-r72q5" event={"ID":"b012712b-f57a-4334-aa1c-0264aedf8e5c","Type":"ContainerStarted","Data":"d978fd2ab99c36ea09cd177e8b10cb555231d73297c0ce4ca87641064053580d"} Jan 31 06:04:27 crc kubenswrapper[4712]: I0131 06:04:27.777042 4712 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2d6f5589-654a-419d-82be-e8ffd94495ca","Type":"ContainerStarted","Data":"1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58"} Jan 31 06:04:27 crc kubenswrapper[4712]: I0131 06:04:27.777087 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2d6f5589-654a-419d-82be-e8ffd94495ca","Type":"ContainerStarted","Data":"e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e"} Jan 31 06:04:27 crc kubenswrapper[4712]: I0131 06:04:27.788464 4712 generic.go:334] "Generic (PLEG): container finished" podID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerID="1b5ebd4101d9562bfaa657983d468321c203a595ed324105802562ad9e7eb364" exitCode=0 Jan 31 06:04:27 crc kubenswrapper[4712]: I0131 06:04:27.788519 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerDied","Data":"1b5ebd4101d9562bfaa657983d468321c203a595ed324105802562ad9e7eb364"} Jan 31 06:04:27 crc kubenswrapper[4712]: I0131 06:04:27.804831 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-r72q5" podStartSLOduration=2.804807971 podStartE2EDuration="2.804807971s" podCreationTimestamp="2026-01-31 06:04:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:04:27.795798328 +0000 UTC m=+1533.889680169" watchObservedRunningTime="2026-01-31 06:04:27.804807971 +0000 UTC m=+1533.898689812" Jan 31 06:04:27 crc kubenswrapper[4712]: I0131 06:04:27.832489 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.832470337 podStartE2EDuration="2.832470337s" podCreationTimestamp="2026-01-31 06:04:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:04:27.825697189 +0000 UTC m=+1533.919579040" watchObservedRunningTime="2026-01-31 06:04:27.832470337 +0000 UTC m=+1533.926352178" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.154441 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.161962 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-sg-core-conf-yaml\") pod \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.163621 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-ceilometer-tls-certs\") pod \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.164578 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-log-httpd\") pod \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.165125 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" (UID: "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.165349 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-combined-ca-bundle\") pod \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.165515 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-run-httpd\") pod \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.166146 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n92l\" (UniqueName: \"kubernetes.io/projected/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-kube-api-access-8n92l\") pod \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.166505 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-config-data\") pod \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.167566 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-scripts\") pod \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\" (UID: \"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0\") " Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.166054 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod 
"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" (UID: "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.172219 4712 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.172245 4712 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.173361 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-kube-api-access-8n92l" (OuterVolumeSpecName: "kube-api-access-8n92l") pod "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" (UID: "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0"). InnerVolumeSpecName "kube-api-access-8n92l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.174520 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-scripts" (OuterVolumeSpecName: "scripts") pod "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" (UID: "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.217475 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" (UID: "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.232984 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" (UID: "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.256580 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" (UID: "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.274620 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.274950 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n92l\" (UniqueName: \"kubernetes.io/projected/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-kube-api-access-8n92l\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.275044 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.275188 4712 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.275280 4712 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.312541 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-config-data" (OuterVolumeSpecName: "config-data") pod "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" (UID: "5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.377548 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.805024 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0","Type":"ContainerDied","Data":"084cee88f5d15328eb3bc8edb5393ef7dd71c1bb83a88866e17eda8b1a7cbe4a"} Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.805110 4712 scope.go:117] "RemoveContainer" containerID="e5c63bbd12354cebd4e3fc7c58a7cda9df4173fa086de9425ec889d4e7b52cad" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.805349 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.831688 4712 scope.go:117] "RemoveContainer" containerID="f841a7c6623e2053a76af860d7f20e67169b8ff0119a9e7dbe9d83eea843a620" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.835652 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.850566 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.865337 4712 scope.go:117] "RemoveContainer" containerID="b2b6b6e6dae9956d28fdea2e41f6e778b636a4d71372365030b195ab5841e97b" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.869402 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:28 crc kubenswrapper[4712]: E0131 06:04:28.869914 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="proxy-httpd" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.869935 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="proxy-httpd" Jan 31 06:04:28 crc kubenswrapper[4712]: E0131 06:04:28.869951 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="sg-core" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.869957 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="sg-core" Jan 31 06:04:28 crc kubenswrapper[4712]: E0131 06:04:28.869974 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="ceilometer-central-agent" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.869981 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="ceilometer-central-agent" Jan 31 06:04:28 crc kubenswrapper[4712]: E0131 06:04:28.869990 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="ceilometer-notification-agent" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.869996 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="ceilometer-notification-agent" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.870210 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="proxy-httpd" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.870224 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="sg-core" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.870241 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="ceilometer-notification-agent" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.870253 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" containerName="ceilometer-central-agent" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.871891 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.887277 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.887468 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.887747 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.888454 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.888490 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.888578 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e9e028b-835f-498c-a16a-88a444ee2739-log-httpd\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.888662 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e9e028b-835f-498c-a16a-88a444ee2739-run-httpd\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.888731 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-scripts\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.888754 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.888782 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-config-data\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.888835 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xwzk\" (UniqueName: \"kubernetes.io/projected/9e9e028b-835f-498c-a16a-88a444ee2739-kube-api-access-5xwzk\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 
31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.907280 4712 scope.go:117] "RemoveContainer" containerID="1b5ebd4101d9562bfaa657983d468321c203a595ed324105802562ad9e7eb364" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.914511 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.991214 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e9e028b-835f-498c-a16a-88a444ee2739-log-httpd\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.991318 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e9e028b-835f-498c-a16a-88a444ee2739-run-httpd\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.991403 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-scripts\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.991436 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.991466 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-config-data\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.991516 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xwzk\" (UniqueName: \"kubernetes.io/projected/9e9e028b-835f-498c-a16a-88a444ee2739-kube-api-access-5xwzk\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.991551 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.991571 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.992662 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e9e028b-835f-498c-a16a-88a444ee2739-log-httpd\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 
06:04:28.992959 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e9e028b-835f-498c-a16a-88a444ee2739-run-httpd\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.998245 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-scripts\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.998495 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:28 crc kubenswrapper[4712]: I0131 06:04:28.999138 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.002906 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.015820 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e9e028b-835f-498c-a16a-88a444ee2739-config-data\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.030396 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xwzk\" (UniqueName: \"kubernetes.io/projected/9e9e028b-835f-498c-a16a-88a444ee2739-kube-api-access-5xwzk\") pod \"ceilometer-0\" (UID: \"9e9e028b-835f-498c-a16a-88a444ee2739\") " pod="openstack/ceilometer-0" Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.152337 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.191774 4712 util.go:30] "No sandbox for pod can be found. 
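The "SyncLoop (probe)" entries, one of which appears just above and another near the end of this section with status="", record readiness transitions: an empty status means the pod is not (or no longer) ready. A sketch that reports only the transitions, deduplicating repeated identical reports; the regexp is an assumption matched to this rendering:

    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    // probeRe matches SyncLoop probe updates; status may be "" while not ready.
    var probeRe = regexp.MustCompile(`"SyncLoop \(probe\)" probe="([^"]+)" status="([^"]*)" pod="([^"]+)"`)

    func main() {
        last := map[string]string{}
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 1024*1024), 1024*1024)
        for sc.Scan() {
            if m := probeRe.FindStringSubmatch(sc.Text()); m != nil {
                key := m[3] + "/" + m[1]
                if last[key] != m[2] {
                    fmt.Printf("%s: %q -> %q\n", key, last[key], m[2])
                    last[key] = m[2]
                }
            }
        }
    }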
Need to start a new one" pod="openstack/ceilometer-0" Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.288779 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bbbd958c-sm4v8"] Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.289239 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" podUID="a7036754-54d9-4ca3-a12e-0ff31a7acb2a" containerName="dnsmasq-dns" containerID="cri-o://023b9d793e9cce5c9385c5522f90000495c22c9cb4f8a520db05b330bf39469a" gracePeriod=10 Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.822265 4712 generic.go:334] "Generic (PLEG): container finished" podID="a7036754-54d9-4ca3-a12e-0ff31a7acb2a" containerID="023b9d793e9cce5c9385c5522f90000495c22c9cb4f8a520db05b330bf39469a" exitCode=0 Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.822609 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" event={"ID":"a7036754-54d9-4ca3-a12e-0ff31a7acb2a","Type":"ContainerDied","Data":"023b9d793e9cce5c9385c5522f90000495c22c9cb4f8a520db05b330bf39469a"} Jan 31 06:04:29 crc kubenswrapper[4712]: W0131 06:04:29.868452 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e9e028b_835f_498c_a16a_88a444ee2739.slice/crio-be2a47af32d12e93d0a44ab887be4ce0252999e8c28a60ec5484830881c77cf3 WatchSource:0}: Error finding container be2a47af32d12e93d0a44ab887be4ce0252999e8c28a60ec5484830881c77cf3: Status 404 returned error can't find the container with id be2a47af32d12e93d0a44ab887be4ce0252999e8c28a60ec5484830881c77cf3 Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.868651 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 31 06:04:29 crc kubenswrapper[4712]: I0131 06:04:29.925232 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.017891 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vtsw\" (UniqueName: \"kubernetes.io/projected/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-kube-api-access-7vtsw\") pod \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.018223 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-config\") pod \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.018258 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-swift-storage-0\") pod \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.018330 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-svc\") pod \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.018368 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-sb\") pod \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.018781 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-nb\") pod \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.055128 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-kube-api-access-7vtsw" (OuterVolumeSpecName: "kube-api-access-7vtsw") pod "a7036754-54d9-4ca3-a12e-0ff31a7acb2a" (UID: "a7036754-54d9-4ca3-a12e-0ff31a7acb2a"). InnerVolumeSpecName "kube-api-access-7vtsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.071449 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a7036754-54d9-4ca3-a12e-0ff31a7acb2a" (UID: "a7036754-54d9-4ca3-a12e-0ff31a7acb2a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.095955 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-config" (OuterVolumeSpecName: "config") pod "a7036754-54d9-4ca3-a12e-0ff31a7acb2a" (UID: "a7036754-54d9-4ca3-a12e-0ff31a7acb2a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.103735 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a7036754-54d9-4ca3-a12e-0ff31a7acb2a" (UID: "a7036754-54d9-4ca3-a12e-0ff31a7acb2a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.107862 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a7036754-54d9-4ca3-a12e-0ff31a7acb2a" (UID: "a7036754-54d9-4ca3-a12e-0ff31a7acb2a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.121076 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a7036754-54d9-4ca3-a12e-0ff31a7acb2a" (UID: "a7036754-54d9-4ca3-a12e-0ff31a7acb2a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.121402 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-swift-storage-0\") pod \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\" (UID: \"a7036754-54d9-4ca3-a12e-0ff31a7acb2a\") " Jan 31 06:04:30 crc kubenswrapper[4712]: W0131 06:04:30.121572 4712 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/a7036754-54d9-4ca3-a12e-0ff31a7acb2a/volumes/kubernetes.io~configmap/dns-swift-storage-0 Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.121593 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a7036754-54d9-4ca3-a12e-0ff31a7acb2a" (UID: "a7036754-54d9-4ca3-a12e-0ff31a7acb2a"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.122141 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.122181 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.122196 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.122208 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.122219 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.122231 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vtsw\" (UniqueName: \"kubernetes.io/projected/a7036754-54d9-4ca3-a12e-0ff31a7acb2a-kube-api-access-7vtsw\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.519546 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0" path="/var/lib/kubelet/pods/5cd3c310-b8d2-44e9-86d8-f85c9c2c13d0/volumes" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.835533 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" event={"ID":"a7036754-54d9-4ca3-a12e-0ff31a7acb2a","Type":"ContainerDied","Data":"76d154d01e4eab9553c167fc139a3541cdb9ec2349b8f10763f5019e5e5fd5c9"} Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.835938 4712 scope.go:117] "RemoveContainer" containerID="023b9d793e9cce5c9385c5522f90000495c22c9cb4f8a520db05b330bf39469a" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.836094 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56bbbd958c-sm4v8" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.839842 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e9e028b-835f-498c-a16a-88a444ee2739","Type":"ContainerStarted","Data":"f162f965e2c88f342c198f4f87b88617aff98d3d3e658496e930dd11d1ff8264"} Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.839888 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e9e028b-835f-498c-a16a-88a444ee2739","Type":"ContainerStarted","Data":"be2a47af32d12e93d0a44ab887be4ce0252999e8c28a60ec5484830881c77cf3"} Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.881542 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bbbd958c-sm4v8"] Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.881784 4712 scope.go:117] "RemoveContainer" containerID="288bde4d6ccc617d7ba335da3f2d56f8e537d7f2c002e15c58af19d3ffafc628" Jan 31 06:04:30 crc kubenswrapper[4712]: I0131 06:04:30.895914 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56bbbd958c-sm4v8"] Jan 31 06:04:31 crc kubenswrapper[4712]: I0131 06:04:31.853648 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e9e028b-835f-498c-a16a-88a444ee2739","Type":"ContainerStarted","Data":"1307f1474e183aceb7c3e64acd59bbed0d7312c2f49292c5214c5fef59c87778"} Jan 31 06:04:32 crc kubenswrapper[4712]: I0131 06:04:32.515386 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7036754-54d9-4ca3-a12e-0ff31a7acb2a" path="/var/lib/kubelet/pods/a7036754-54d9-4ca3-a12e-0ff31a7acb2a/volumes" Jan 31 06:04:32 crc kubenswrapper[4712]: I0131 06:04:32.865784 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e9e028b-835f-498c-a16a-88a444ee2739","Type":"ContainerStarted","Data":"46191a62cff83ffbac30b6e27d7b19c6dbba7ab9c024539c4975f540eefa2dc5"} Jan 31 06:04:33 crc kubenswrapper[4712]: I0131 06:04:33.878394 4712 generic.go:334] "Generic (PLEG): container finished" podID="b012712b-f57a-4334-aa1c-0264aedf8e5c" containerID="b1c4ac3096018c230454730faf2842e755cacfcc217cb8c81c81b581e7723af5" exitCode=0 Jan 31 06:04:33 crc kubenswrapper[4712]: I0131 06:04:33.878472 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-r72q5" event={"ID":"b012712b-f57a-4334-aa1c-0264aedf8e5c","Type":"ContainerDied","Data":"b1c4ac3096018c230454730faf2842e755cacfcc217cb8c81c81b581e7723af5"} Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.307095 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.434328 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-combined-ca-bundle\") pod \"b012712b-f57a-4334-aa1c-0264aedf8e5c\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.434538 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-config-data\") pod \"b012712b-f57a-4334-aa1c-0264aedf8e5c\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.434591 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lpwq\" (UniqueName: \"kubernetes.io/projected/b012712b-f57a-4334-aa1c-0264aedf8e5c-kube-api-access-8lpwq\") pod \"b012712b-f57a-4334-aa1c-0264aedf8e5c\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.434622 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-scripts\") pod \"b012712b-f57a-4334-aa1c-0264aedf8e5c\" (UID: \"b012712b-f57a-4334-aa1c-0264aedf8e5c\") " Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.442503 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-scripts" (OuterVolumeSpecName: "scripts") pod "b012712b-f57a-4334-aa1c-0264aedf8e5c" (UID: "b012712b-f57a-4334-aa1c-0264aedf8e5c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.443631 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b012712b-f57a-4334-aa1c-0264aedf8e5c-kube-api-access-8lpwq" (OuterVolumeSpecName: "kube-api-access-8lpwq") pod "b012712b-f57a-4334-aa1c-0264aedf8e5c" (UID: "b012712b-f57a-4334-aa1c-0264aedf8e5c"). InnerVolumeSpecName "kube-api-access-8lpwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.467616 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-config-data" (OuterVolumeSpecName: "config-data") pod "b012712b-f57a-4334-aa1c-0264aedf8e5c" (UID: "b012712b-f57a-4334-aa1c-0264aedf8e5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.473940 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b012712b-f57a-4334-aa1c-0264aedf8e5c" (UID: "b012712b-f57a-4334-aa1c-0264aedf8e5c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.539656 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.539687 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.539699 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lpwq\" (UniqueName: \"kubernetes.io/projected/b012712b-f57a-4334-aa1c-0264aedf8e5c-kube-api-access-8lpwq\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.539713 4712 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b012712b-f57a-4334-aa1c-0264aedf8e5c-scripts\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.899080 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-r72q5" event={"ID":"b012712b-f57a-4334-aa1c-0264aedf8e5c","Type":"ContainerDied","Data":"d978fd2ab99c36ea09cd177e8b10cb555231d73297c0ce4ca87641064053580d"} Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.899134 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d978fd2ab99c36ea09cd177e8b10cb555231d73297c0ce4ca87641064053580d" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.899231 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-r72q5" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.907550 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e9e028b-835f-498c-a16a-88a444ee2739","Type":"ContainerStarted","Data":"f2f35bf4b12289ed88e9bce38cd9b1a46c09adc73eeb762ebee4763390697854"} Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.907791 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 31 06:04:35 crc kubenswrapper[4712]: I0131 06:04:35.947574 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.378205902 podStartE2EDuration="7.947554242s" podCreationTimestamp="2026-01-31 06:04:28 +0000 UTC" firstStartedPulling="2026-01-31 06:04:29.874108584 +0000 UTC m=+1535.967990425" lastFinishedPulling="2026-01-31 06:04:35.443456924 +0000 UTC m=+1541.537338765" observedRunningTime="2026-01-31 06:04:35.936631151 +0000 UTC m=+1542.030512992" watchObservedRunningTime="2026-01-31 06:04:35.947554242 +0000 UTC m=+1542.041436083" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.097560 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.098161 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerName="nova-api-log" containerID="cri-o://e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e" gracePeriod=30 Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.098333 4712 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-api-0" podUID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerName="nova-api-api" containerID="cri-o://1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58" gracePeriod=30 Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.129519 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.129820 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-log" containerID="cri-o://e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281" gracePeriod=30 Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.130430 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-metadata" containerID="cri-o://6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7" gracePeriod=30 Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.152249 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.152520 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" containerName="nova-scheduler-scheduler" containerID="cri-o://14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1" gracePeriod=30 Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.895823 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.918539 4712 generic.go:334] "Generic (PLEG): container finished" podID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerID="e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281" exitCode=143 Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.918613 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2ccae9d-4c44-4ca8-96ed-44d05d37790f","Type":"ContainerDied","Data":"e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281"} Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.920822 4712 generic.go:334] "Generic (PLEG): container finished" podID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerID="1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58" exitCode=0 Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.920845 4712 generic.go:334] "Generic (PLEG): container finished" podID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerID="e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e" exitCode=143 Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.922106 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.922276 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2d6f5589-654a-419d-82be-e8ffd94495ca","Type":"ContainerDied","Data":"1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58"} Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.922364 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2d6f5589-654a-419d-82be-e8ffd94495ca","Type":"ContainerDied","Data":"e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e"} Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.922383 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2d6f5589-654a-419d-82be-e8ffd94495ca","Type":"ContainerDied","Data":"18a4bb921e8e437c113a0df9e83d67ce916a3c092fb8f0b96a8f506e8614b7aa"} Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.922437 4712 scope.go:117] "RemoveContainer" containerID="1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.959477 4712 scope.go:117] "RemoveContainer" containerID="e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.984006 4712 scope.go:117] "RemoveContainer" containerID="1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58" Jan 31 06:04:36 crc kubenswrapper[4712]: E0131 06:04:36.986835 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58\": container with ID starting with 1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58 not found: ID does not exist" containerID="1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.986897 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58"} err="failed to get container status \"1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58\": rpc error: code = NotFound desc = could not find container \"1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58\": container with ID starting with 1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58 not found: ID does not exist" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.986957 4712 scope.go:117] "RemoveContainer" containerID="e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e" Jan 31 06:04:36 crc kubenswrapper[4712]: E0131 06:04:36.987568 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e\": container with ID starting with e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e not found: ID does not exist" containerID="e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.987605 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e"} err="failed to get container status \"e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e\": rpc error: code = NotFound desc = could not 
find container \"e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e\": container with ID starting with e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e not found: ID does not exist" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.987629 4712 scope.go:117] "RemoveContainer" containerID="1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.987927 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58"} err="failed to get container status \"1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58\": rpc error: code = NotFound desc = could not find container \"1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58\": container with ID starting with 1b9514c95234265c3ad7b300ed0bc0a35cf6dfe18887259450fbbcdde28e7d58 not found: ID does not exist" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.987952 4712 scope.go:117] "RemoveContainer" containerID="e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e" Jan 31 06:04:36 crc kubenswrapper[4712]: I0131 06:04:36.988825 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e"} err="failed to get container status \"e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e\": rpc error: code = NotFound desc = could not find container \"e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e\": container with ID starting with e97d66b39c7c16a4e3aaca8e64ea0d298355f8eeec0f1f22bf6ec05db6f2386e not found: ID does not exist" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.080072 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d6f5589-654a-419d-82be-e8ffd94495ca-logs\") pod \"2d6f5589-654a-419d-82be-e8ffd94495ca\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.080146 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-combined-ca-bundle\") pod \"2d6f5589-654a-419d-82be-e8ffd94495ca\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.080203 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqllz\" (UniqueName: \"kubernetes.io/projected/2d6f5589-654a-419d-82be-e8ffd94495ca-kube-api-access-nqllz\") pod \"2d6f5589-654a-419d-82be-e8ffd94495ca\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.080251 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-config-data\") pod \"2d6f5589-654a-419d-82be-e8ffd94495ca\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.080413 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-public-tls-certs\") pod \"2d6f5589-654a-419d-82be-e8ffd94495ca\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " Jan 31 06:04:37 crc 
kubenswrapper[4712]: I0131 06:04:37.080483 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-internal-tls-certs\") pod \"2d6f5589-654a-419d-82be-e8ffd94495ca\" (UID: \"2d6f5589-654a-419d-82be-e8ffd94495ca\") " Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.080853 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d6f5589-654a-419d-82be-e8ffd94495ca-logs" (OuterVolumeSpecName: "logs") pod "2d6f5589-654a-419d-82be-e8ffd94495ca" (UID: "2d6f5589-654a-419d-82be-e8ffd94495ca"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.081061 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d6f5589-654a-419d-82be-e8ffd94495ca-logs\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.087395 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d6f5589-654a-419d-82be-e8ffd94495ca-kube-api-access-nqllz" (OuterVolumeSpecName: "kube-api-access-nqllz") pod "2d6f5589-654a-419d-82be-e8ffd94495ca" (UID: "2d6f5589-654a-419d-82be-e8ffd94495ca"). InnerVolumeSpecName "kube-api-access-nqllz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.117346 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-config-data" (OuterVolumeSpecName: "config-data") pod "2d6f5589-654a-419d-82be-e8ffd94495ca" (UID: "2d6f5589-654a-419d-82be-e8ffd94495ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.128133 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d6f5589-654a-419d-82be-e8ffd94495ca" (UID: "2d6f5589-654a-419d-82be-e8ffd94495ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.145763 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2d6f5589-654a-419d-82be-e8ffd94495ca" (UID: "2d6f5589-654a-419d-82be-e8ffd94495ca"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.148096 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2d6f5589-654a-419d-82be-e8ffd94495ca" (UID: "2d6f5589-654a-419d-82be-e8ffd94495ca"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.183191 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.183231 4712 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.183244 4712 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.183254 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d6f5589-654a-419d-82be-e8ffd94495ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.183265 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqllz\" (UniqueName: \"kubernetes.io/projected/2d6f5589-654a-419d-82be-e8ffd94495ca-kube-api-access-nqllz\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.302117 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.316562 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336032 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:37 crc kubenswrapper[4712]: E0131 06:04:37.336555 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerName="nova-api-api" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336577 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerName="nova-api-api" Jan 31 06:04:37 crc kubenswrapper[4712]: E0131 06:04:37.336595 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7036754-54d9-4ca3-a12e-0ff31a7acb2a" containerName="dnsmasq-dns" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336602 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7036754-54d9-4ca3-a12e-0ff31a7acb2a" containerName="dnsmasq-dns" Jan 31 06:04:37 crc kubenswrapper[4712]: E0131 06:04:37.336616 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerName="nova-api-log" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336622 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerName="nova-api-log" Jan 31 06:04:37 crc kubenswrapper[4712]: E0131 06:04:37.336634 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b012712b-f57a-4334-aa1c-0264aedf8e5c" containerName="nova-manage" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336640 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b012712b-f57a-4334-aa1c-0264aedf8e5c" containerName="nova-manage" Jan 31 06:04:37 crc kubenswrapper[4712]: E0131 06:04:37.336653 4712 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a7036754-54d9-4ca3-a12e-0ff31a7acb2a" containerName="init" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336659 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7036754-54d9-4ca3-a12e-0ff31a7acb2a" containerName="init" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336849 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7036754-54d9-4ca3-a12e-0ff31a7acb2a" containerName="dnsmasq-dns" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336860 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerName="nova-api-log" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336871 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d6f5589-654a-419d-82be-e8ffd94495ca" containerName="nova-api-api" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.336879 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b012712b-f57a-4334-aa1c-0264aedf8e5c" containerName="nova-manage" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.338087 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.340333 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.340747 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.340910 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.352240 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.492627 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.492956 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-public-tls-certs\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.493028 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.493312 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6sfk\" (UniqueName: \"kubernetes.io/projected/e2c19455-9dc4-45de-afd3-d55b91c729c6-kube-api-access-p6sfk\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.493611 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-config-data\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.493712 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2c19455-9dc4-45de-afd3-d55b91c729c6-logs\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.596011 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6sfk\" (UniqueName: \"kubernetes.io/projected/e2c19455-9dc4-45de-afd3-d55b91c729c6-kube-api-access-p6sfk\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.596075 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-config-data\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.596103 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2c19455-9dc4-45de-afd3-d55b91c729c6-logs\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.596188 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.596274 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-public-tls-certs\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.596293 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.597514 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2c19455-9dc4-45de-afd3-d55b91c729c6-logs\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.600412 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-public-tls-certs\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.600638 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.601005 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-config-data\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.601684 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2c19455-9dc4-45de-afd3-d55b91c729c6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.632852 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6sfk\" (UniqueName: \"kubernetes.io/projected/e2c19455-9dc4-45de-afd3-d55b91c729c6-kube-api-access-p6sfk\") pod \"nova-api-0\" (UID: \"e2c19455-9dc4-45de-afd3-d55b91c729c6\") " pod="openstack/nova-api-0" Jan 31 06:04:37 crc kubenswrapper[4712]: I0131 06:04:37.704899 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 31 06:04:38 crc kubenswrapper[4712]: W0131 06:04:38.211398 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2c19455_9dc4_45de_afd3_d55b91c729c6.slice/crio-6ad8513602c13d2ee616d37474e0369efd4933f96f7bf4404e5485fd9b9604cf WatchSource:0}: Error finding container 6ad8513602c13d2ee616d37474e0369efd4933f96f7bf4404e5485fd9b9604cf: Status 404 returned error can't find the container with id 6ad8513602c13d2ee616d37474e0369efd4933f96f7bf4404e5485fd9b9604cf Jan 31 06:04:38 crc kubenswrapper[4712]: I0131 06:04:38.219028 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 31 06:04:38 crc kubenswrapper[4712]: I0131 06:04:38.515891 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d6f5589-654a-419d-82be-e8ffd94495ca" path="/var/lib/kubelet/pods/2d6f5589-654a-419d-82be-e8ffd94495ca/volumes" Jan 31 06:04:38 crc kubenswrapper[4712]: E0131 06:04:38.638375 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1 is running failed: container process not found" containerID="14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 31 06:04:38 crc kubenswrapper[4712]: E0131 06:04:38.638997 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1 is running failed: container process not found" containerID="14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 31 06:04:38 crc kubenswrapper[4712]: E0131 06:04:38.639372 4712 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1 is running failed: 
container process not found" containerID="14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 31 06:04:38 crc kubenswrapper[4712]: E0131 06:04:38.639434 4712 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" containerName="nova-scheduler-scheduler" Jan 31 06:04:38 crc kubenswrapper[4712]: I0131 06:04:38.946836 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e2c19455-9dc4-45de-afd3-d55b91c729c6","Type":"ContainerStarted","Data":"df39db9a0f42d23e2455c9b9896f36b1fc32dbaa05980a30052355e56c5d9051"} Jan 31 06:04:38 crc kubenswrapper[4712]: I0131 06:04:38.947766 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e2c19455-9dc4-45de-afd3-d55b91c729c6","Type":"ContainerStarted","Data":"6ad8513602c13d2ee616d37474e0369efd4933f96f7bf4404e5485fd9b9604cf"} Jan 31 06:04:38 crc kubenswrapper[4712]: I0131 06:04:38.950561 4712 generic.go:334] "Generic (PLEG): container finished" podID="c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" containerID="14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1" exitCode=0 Jan 31 06:04:38 crc kubenswrapper[4712]: I0131 06:04:38.950729 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1","Type":"ContainerDied","Data":"14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1"} Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.279900 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:50580->10.217.0.191:8775: read: connection reset by peer" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.280293 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:50590->10.217.0.191:8775: read: connection reset by peer" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.451589 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.638489 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-config-data\") pod \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.638561 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-combined-ca-bundle\") pod \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.642616 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fg9fv\" (UniqueName: \"kubernetes.io/projected/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-kube-api-access-fg9fv\") pod \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\" (UID: \"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1\") " Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.672827 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-kube-api-access-fg9fv" (OuterVolumeSpecName: "kube-api-access-fg9fv") pod "c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" (UID: "c2af9dbf-e8b2-4236-bbef-8bb4467e60a1"). InnerVolumeSpecName "kube-api-access-fg9fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.696586 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-config-data" (OuterVolumeSpecName: "config-data") pod "c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" (UID: "c2af9dbf-e8b2-4236-bbef-8bb4467e60a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.705671 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" (UID: "c2af9dbf-e8b2-4236-bbef-8bb4467e60a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.747930 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.748438 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.748474 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fg9fv\" (UniqueName: \"kubernetes.io/projected/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1-kube-api-access-fg9fv\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.846695 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.951079 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-combined-ca-bundle\") pod \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.951145 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-logs\") pod \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.951243 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-config-data\") pod \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.951369 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cv2g9\" (UniqueName: \"kubernetes.io/projected/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-kube-api-access-cv2g9\") pod \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.951502 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-nova-metadata-tls-certs\") pod \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\" (UID: \"c2ccae9d-4c44-4ca8-96ed-44d05d37790f\") " Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.953601 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-logs" (OuterVolumeSpecName: "logs") pod "c2ccae9d-4c44-4ca8-96ed-44d05d37790f" (UID: "c2ccae9d-4c44-4ca8-96ed-44d05d37790f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.957817 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-kube-api-access-cv2g9" (OuterVolumeSpecName: "kube-api-access-cv2g9") pod "c2ccae9d-4c44-4ca8-96ed-44d05d37790f" (UID: "c2ccae9d-4c44-4ca8-96ed-44d05d37790f"). InnerVolumeSpecName "kube-api-access-cv2g9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.968437 4712 generic.go:334] "Generic (PLEG): container finished" podID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerID="6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7" exitCode=0 Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.968528 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2ccae9d-4c44-4ca8-96ed-44d05d37790f","Type":"ContainerDied","Data":"6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7"} Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.968569 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2ccae9d-4c44-4ca8-96ed-44d05d37790f","Type":"ContainerDied","Data":"d2a0466c6e84e04ea5287cebf7e1550b1428f58ad56fb553a6975aef69762093"} Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.968589 4712 scope.go:117] "RemoveContainer" containerID="6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.968721 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.975308 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e2c19455-9dc4-45de-afd3-d55b91c729c6","Type":"ContainerStarted","Data":"43d958961600791f953bca87ceeb5f99b89b19e782bd7940fe44c5e313a2cb96"} Jan 31 06:04:39 crc kubenswrapper[4712]: I0131 06:04:39.979056 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:39.980225 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2af9dbf-e8b2-4236-bbef-8bb4467e60a1","Type":"ContainerDied","Data":"bb27a85c275f2cb75541786d8f4d8830cea52bd7261fb7ccd0d37c4b9cc83bee"} Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.001362 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-config-data" (OuterVolumeSpecName: "config-data") pod "c2ccae9d-4c44-4ca8-96ed-44d05d37790f" (UID: "c2ccae9d-4c44-4ca8-96ed-44d05d37790f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.017469 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2ccae9d-4c44-4ca8-96ed-44d05d37790f" (UID: "c2ccae9d-4c44-4ca8-96ed-44d05d37790f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.042544 4712 scope.go:117] "RemoveContainer" containerID="e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.046666 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c2ccae9d-4c44-4ca8-96ed-44d05d37790f" (UID: "c2ccae9d-4c44-4ca8-96ed-44d05d37790f"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.058763 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.058806 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cv2g9\" (UniqueName: \"kubernetes.io/projected/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-kube-api-access-cv2g9\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.058827 4712 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.058841 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.058858 4712 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2ccae9d-4c44-4ca8-96ed-44d05d37790f-logs\") on node \"crc\" DevicePath \"\"" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.074434 4712 scope.go:117] "RemoveContainer" containerID="6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7" Jan 31 06:04:40 crc kubenswrapper[4712]: E0131 06:04:40.080831 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7\": container with ID starting with 6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7 not found: ID does not exist" containerID="6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.080876 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7"} err="failed to get container status \"6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7\": rpc error: code = NotFound desc = could not find container \"6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7\": container with ID starting with 6dbad4f28b82d080b28af8221e1987e90fd29e2a8de421eaa178aa212453f5e7 not found: ID does not exist" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.080902 4712 scope.go:117] "RemoveContainer" containerID="e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281" Jan 31 06:04:40 crc kubenswrapper[4712]: E0131 06:04:40.081298 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281\": container with ID starting with e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281 not found: ID does not exist" containerID="e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.081320 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281"} err="failed to get container status 
\"e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281\": rpc error: code = NotFound desc = could not find container \"e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281\": container with ID starting with e3d5698962e94322bdcd50a1448aeeaa215cb8d75cee37fbe04e4340d9037281 not found: ID does not exist" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.081332 4712 scope.go:117] "RemoveContainer" containerID="14882f672c58ea008c8528550a176500fbccaea39c9cc32e148f2eb3265076e1" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.081599 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.081570913 podStartE2EDuration="3.081570913s" podCreationTimestamp="2026-01-31 06:04:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:04:40.003711815 +0000 UTC m=+1546.097593676" watchObservedRunningTime="2026-01-31 06:04:40.081570913 +0000 UTC m=+1546.175452754" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.093780 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.113569 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.114834 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:04:40 crc kubenswrapper[4712]: E0131 06:04:40.115333 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-log" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.115352 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-log" Jan 31 06:04:40 crc kubenswrapper[4712]: E0131 06:04:40.115380 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-metadata" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.115389 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-metadata" Jan 31 06:04:40 crc kubenswrapper[4712]: E0131 06:04:40.115403 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" containerName="nova-scheduler-scheduler" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.115409 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" containerName="nova-scheduler-scheduler" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.115609 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-log" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.115628 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" containerName="nova-metadata-metadata" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.115641 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" containerName="nova-scheduler-scheduler" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.116417 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.118694 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.124735 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.265854 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sg98\" (UniqueName: \"kubernetes.io/projected/6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9-kube-api-access-4sg98\") pod \"nova-scheduler-0\" (UID: \"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9\") " pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.265954 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9-config-data\") pod \"nova-scheduler-0\" (UID: \"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9\") " pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.266127 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9\") " pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.311494 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.321355 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.344483 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.347750 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.350650 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.351230 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.368407 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sg98\" (UniqueName: \"kubernetes.io/projected/6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9-kube-api-access-4sg98\") pod \"nova-scheduler-0\" (UID: \"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9\") " pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.368767 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9-config-data\") pod \"nova-scheduler-0\" (UID: \"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9\") " pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.368904 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9\") " pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.374282 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9-config-data\") pod \"nova-scheduler-0\" (UID: \"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9\") " pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.374443 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.376831 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9\") " pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.388334 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sg98\" (UniqueName: \"kubernetes.io/projected/6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9-kube-api-access-4sg98\") pod \"nova-scheduler-0\" (UID: \"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9\") " pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.447447 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.471042 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.471120 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.471437 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdgt8\" (UniqueName: \"kubernetes.io/projected/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-kube-api-access-pdgt8\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.471572 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-logs\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.471987 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-config-data\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.519944 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2af9dbf-e8b2-4236-bbef-8bb4467e60a1" path="/var/lib/kubelet/pods/c2af9dbf-e8b2-4236-bbef-8bb4467e60a1/volumes" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.522554 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2ccae9d-4c44-4ca8-96ed-44d05d37790f" path="/var/lib/kubelet/pods/c2ccae9d-4c44-4ca8-96ed-44d05d37790f/volumes" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.574831 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.574898 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.574987 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdgt8\" (UniqueName: \"kubernetes.io/projected/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-kube-api-access-pdgt8\") pod \"nova-metadata-0\" (UID: 
\"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.575043 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-logs\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.575143 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-config-data\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.576784 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-logs\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.581847 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.581982 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.593699 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-config-data\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.596573 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdgt8\" (UniqueName: \"kubernetes.io/projected/c032ae9d-cfbd-4184-8abb-5ccb6e158a0c-kube-api-access-pdgt8\") pod \"nova-metadata-0\" (UID: \"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c\") " pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.668956 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 31 06:04:40 crc kubenswrapper[4712]: W0131 06:04:40.920461 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d55a0e1_ea9a_4236_b89d_3b09b6f6dce9.slice/crio-9d280400511d430138bdba6fdb797457be50b1ed50ee5fdccca4a5310d2d3cc3 WatchSource:0}: Error finding container 9d280400511d430138bdba6fdb797457be50b1ed50ee5fdccca4a5310d2d3cc3: Status 404 returned error can't find the container with id 9d280400511d430138bdba6fdb797457be50b1ed50ee5fdccca4a5310d2d3cc3 Jan 31 06:04:40 crc kubenswrapper[4712]: I0131 06:04:40.923610 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 31 06:04:41 crc kubenswrapper[4712]: I0131 06:04:41.011004 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9","Type":"ContainerStarted","Data":"9d280400511d430138bdba6fdb797457be50b1ed50ee5fdccca4a5310d2d3cc3"} Jan 31 06:04:41 crc kubenswrapper[4712]: I0131 06:04:41.135933 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 31 06:04:41 crc kubenswrapper[4712]: W0131 06:04:41.140007 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc032ae9d_cfbd_4184_8abb_5ccb6e158a0c.slice/crio-44d702ce53a9d917b31debb209cc54a43a949da35de0a49be5df4ee43ff1592f WatchSource:0}: Error finding container 44d702ce53a9d917b31debb209cc54a43a949da35de0a49be5df4ee43ff1592f: Status 404 returned error can't find the container with id 44d702ce53a9d917b31debb209cc54a43a949da35de0a49be5df4ee43ff1592f Jan 31 06:04:42 crc kubenswrapper[4712]: I0131 06:04:42.026569 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c","Type":"ContainerStarted","Data":"33091110b90492a510f6281718dc08ea77deb7a9afeb1563adad924fb94939ee"} Jan 31 06:04:42 crc kubenswrapper[4712]: I0131 06:04:42.026856 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c","Type":"ContainerStarted","Data":"5ec5cc6f29d0b5b7a11adb5ba18b6d424326a5882a507cde4e715b13236a4894"} Jan 31 06:04:42 crc kubenswrapper[4712]: I0131 06:04:42.026867 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c032ae9d-cfbd-4184-8abb-5ccb6e158a0c","Type":"ContainerStarted","Data":"44d702ce53a9d917b31debb209cc54a43a949da35de0a49be5df4ee43ff1592f"} Jan 31 06:04:42 crc kubenswrapper[4712]: I0131 06:04:42.027877 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9","Type":"ContainerStarted","Data":"aec6516d61f827f450fcdb022d58f2e96442d31e9cfd53735950169c1068c484"} Jan 31 06:04:42 crc kubenswrapper[4712]: I0131 06:04:42.047690 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.047671431 podStartE2EDuration="2.047671431s" podCreationTimestamp="2026-01-31 06:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:04:42.044952133 +0000 UTC m=+1548.138833994" watchObservedRunningTime="2026-01-31 06:04:42.047671431 +0000 UTC m=+1548.141553272" Jan 31 06:04:43 crc kubenswrapper[4712]: 
I0131 06:04:43.064813 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.064786997 podStartE2EDuration="3.064786997s" podCreationTimestamp="2026-01-31 06:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:04:43.055036865 +0000 UTC m=+1549.148918726" watchObservedRunningTime="2026-01-31 06:04:43.064786997 +0000 UTC m=+1549.158668858" Jan 31 06:04:45 crc kubenswrapper[4712]: I0131 06:04:45.449106 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 31 06:04:45 crc kubenswrapper[4712]: I0131 06:04:45.669768 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 31 06:04:45 crc kubenswrapper[4712]: I0131 06:04:45.669864 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 31 06:04:47 crc kubenswrapper[4712]: I0131 06:04:47.705547 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 31 06:04:47 crc kubenswrapper[4712]: I0131 06:04:47.706210 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 31 06:04:48 crc kubenswrapper[4712]: I0131 06:04:48.719369 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e2c19455-9dc4-45de-afd3-d55b91c729c6" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 31 06:04:48 crc kubenswrapper[4712]: I0131 06:04:48.719438 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e2c19455-9dc4-45de-afd3-d55b91c729c6" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 31 06:04:50 crc kubenswrapper[4712]: I0131 06:04:50.448875 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 31 06:04:50 crc kubenswrapper[4712]: I0131 06:04:50.478748 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 31 06:04:50 crc kubenswrapper[4712]: I0131 06:04:50.669489 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 31 06:04:50 crc kubenswrapper[4712]: I0131 06:04:50.672236 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 31 06:04:51 crc kubenswrapper[4712]: I0131 06:04:51.140913 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 31 06:04:51 crc kubenswrapper[4712]: I0131 06:04:51.682524 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c032ae9d-cfbd-4184-8abb-5ccb6e158a0c" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 31 06:04:51 crc kubenswrapper[4712]: I0131 06:04:51.682533 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c032ae9d-cfbd-4184-8abb-5ccb6e158a0c" containerName="nova-metadata-metadata" probeResult="failure" 
output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 31 06:04:57 crc kubenswrapper[4712]: I0131 06:04:57.716862 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 31 06:04:57 crc kubenswrapper[4712]: I0131 06:04:57.719031 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 31 06:04:57 crc kubenswrapper[4712]: I0131 06:04:57.721720 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 31 06:04:57 crc kubenswrapper[4712]: I0131 06:04:57.728468 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 31 06:04:58 crc kubenswrapper[4712]: I0131 06:04:58.183005 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 31 06:04:58 crc kubenswrapper[4712]: I0131 06:04:58.190859 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 31 06:04:59 crc kubenswrapper[4712]: I0131 06:04:59.200734 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 31 06:05:00 crc kubenswrapper[4712]: I0131 06:05:00.674714 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 31 06:05:00 crc kubenswrapper[4712]: I0131 06:05:00.677410 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 31 06:05:00 crc kubenswrapper[4712]: I0131 06:05:00.680694 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 31 06:05:01 crc kubenswrapper[4712]: I0131 06:05:01.219668 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 31 06:05:09 crc kubenswrapper[4712]: I0131 06:05:09.099156 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 31 06:05:10 crc kubenswrapper[4712]: I0131 06:05:10.033709 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 31 06:05:13 crc kubenswrapper[4712]: I0131 06:05:13.893530 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="c6bc2bf6-037a-4415-9e9a-fdae0ef54662" containerName="rabbitmq" containerID="cri-o://097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22" gracePeriod=604796 Jan 31 06:05:14 crc kubenswrapper[4712]: I0131 06:05:14.988345 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" containerName="rabbitmq" containerID="cri-o://7fbb1c5aef134e7040b0b7f309f2f23106e23f200a2d5fe27734ee1961e8e9f7" gracePeriod=604796 Jan 31 06:05:16 crc kubenswrapper[4712]: I0131 06:05:16.019811 4712 scope.go:117] "RemoveContainer" containerID="506652cfe05af02cd801d6b0c1b91751bb6d62ea865452355ba91f1abcb256cf" Jan 31 06:05:20 crc kubenswrapper[4712]: I0131 06:05:20.917284 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079260 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-pod-info\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079343 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zvvh\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-kube-api-access-7zvvh\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079410 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-plugins-conf\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079443 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-tls\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079534 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-config-data\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079600 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-erlang-cookie-secret\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079659 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-erlang-cookie\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079706 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079742 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-confd\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079769 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-plugins\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: 
\"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079810 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-server-conf\") pod \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\" (UID: \"c6bc2bf6-037a-4415-9e9a-fdae0ef54662\") " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.079843 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.080195 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.080222 4712 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.081295 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.086068 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.086819 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-kube-api-access-7zvvh" (OuterVolumeSpecName: "kube-api-access-7zvvh") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "kube-api-access-7zvvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.088325 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.088411 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.091364 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-pod-info" (OuterVolumeSpecName: "pod-info") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.133956 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-config-data" (OuterVolumeSpecName: "config-data") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.137968 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-server-conf" (OuterVolumeSpecName: "server-conf") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.184324 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zvvh\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-kube-api-access-7zvvh\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.184354 4712 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.184363 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.184373 4712 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.184385 4712 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.184423 4712 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.184434 4712 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.184443 4712 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-server-conf\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.184451 4712 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-pod-info\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.209774 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c6bc2bf6-037a-4415-9e9a-fdae0ef54662" (UID: "c6bc2bf6-037a-4415-9e9a-fdae0ef54662"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.214731 4712 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.285996 4712 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.286029 4712 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c6bc2bf6-037a-4415-9e9a-fdae0ef54662-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.422159 4712 generic.go:334] "Generic (PLEG): container finished" podID="c6bc2bf6-037a-4415-9e9a-fdae0ef54662" containerID="097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22" exitCode=0 Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.422221 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6bc2bf6-037a-4415-9e9a-fdae0ef54662","Type":"ContainerDied","Data":"097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22"} Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.422254 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c6bc2bf6-037a-4415-9e9a-fdae0ef54662","Type":"ContainerDied","Data":"c6af7af08ffbb5d1a7ed07fe8656b63a292d2d6b9d3ac87793a15e24d3744857"} Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.422301 4712 scope.go:117] "RemoveContainer" containerID="097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.422420 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.457528 4712 scope.go:117] "RemoveContainer" containerID="cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.498405 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.572514 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.597770 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.609042 4712 scope.go:117] "RemoveContainer" containerID="097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.609378 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 31 06:05:21 crc kubenswrapper[4712]: E0131 06:05:21.610144 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6bc2bf6-037a-4415-9e9a-fdae0ef54662" containerName="rabbitmq" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.610269 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6bc2bf6-037a-4415-9e9a-fdae0ef54662" containerName="rabbitmq" Jan 31 06:05:21 crc kubenswrapper[4712]: E0131 06:05:21.610371 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6bc2bf6-037a-4415-9e9a-fdae0ef54662" containerName="setup-container" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.610455 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6bc2bf6-037a-4415-9e9a-fdae0ef54662" containerName="setup-container" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.610820 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6bc2bf6-037a-4415-9e9a-fdae0ef54662" containerName="rabbitmq" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.612565 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: E0131 06:05:21.614901 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22\": container with ID starting with 097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22 not found: ID does not exist" containerID="097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.622493 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22"} err="failed to get container status \"097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22\": rpc error: code = NotFound desc = could not find container \"097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22\": container with ID starting with 097dfa94ff15162e3624276dda431483991de824baa2698108de7715c245cc22 not found: ID does not exist" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.622569 4712 scope.go:117] "RemoveContainer" containerID="cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.619396 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.619461 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jgrnb" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.618889 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.619520 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.619550 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.619588 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.620269 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.621741 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 31 06:05:21 crc kubenswrapper[4712]: E0131 06:05:21.623189 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7\": container with ID starting with cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7 not found: ID does not exist" containerID="cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.623313 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7"} err="failed to get container status \"cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7\": rpc error: code = NotFound desc = could not find container 
\"cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7\": container with ID starting with cf1065ddf388b9f1504e32eb0098c53e7108a2ea7a81a26ecc08e9e28afd45b7 not found: ID does not exist" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.702405 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.702534 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/57a64e6d-ff8a-480a-aa16-563b5b127e6f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.702565 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6zcb\" (UniqueName: \"kubernetes.io/projected/57a64e6d-ff8a-480a-aa16-563b5b127e6f-kube-api-access-r6zcb\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.702627 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.702791 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.702881 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/57a64e6d-ff8a-480a-aa16-563b5b127e6f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.702957 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/57a64e6d-ff8a-480a-aa16-563b5b127e6f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.703093 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57a64e6d-ff8a-480a-aa16-563b5b127e6f-config-data\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.703152 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.703219 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/57a64e6d-ff8a-480a-aa16-563b5b127e6f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.703288 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805352 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805486 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/57a64e6d-ff8a-480a-aa16-563b5b127e6f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805536 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/57a64e6d-ff8a-480a-aa16-563b5b127e6f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805625 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57a64e6d-ff8a-480a-aa16-563b5b127e6f-config-data\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805691 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805720 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/57a64e6d-ff8a-480a-aa16-563b5b127e6f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805795 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805866 4712 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805918 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6zcb\" (UniqueName: \"kubernetes.io/projected/57a64e6d-ff8a-480a-aa16-563b5b127e6f-kube-api-access-r6zcb\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805944 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/57a64e6d-ff8a-480a-aa16-563b5b127e6f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.805996 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.807761 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.807987 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.808391 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.808866 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57a64e6d-ff8a-480a-aa16-563b5b127e6f-config-data\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.808908 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/57a64e6d-ff8a-480a-aa16-563b5b127e6f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.813943 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: 
\"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.814523 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/57a64e6d-ff8a-480a-aa16-563b5b127e6f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.814681 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/57a64e6d-ff8a-480a-aa16-563b5b127e6f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.816757 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/57a64e6d-ff8a-480a-aa16-563b5b127e6f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.825755 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/57a64e6d-ff8a-480a-aa16-563b5b127e6f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.842872 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6zcb\" (UniqueName: \"kubernetes.io/projected/57a64e6d-ff8a-480a-aa16-563b5b127e6f-kube-api-access-r6zcb\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.854323 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"57a64e6d-ff8a-480a-aa16-563b5b127e6f\") " pod="openstack/rabbitmq-server-0" Jan 31 06:05:21 crc kubenswrapper[4712]: I0131 06:05:21.977052 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.440134 4712 generic.go:334] "Generic (PLEG): container finished" podID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" containerID="7fbb1c5aef134e7040b0b7f309f2f23106e23f200a2d5fe27734ee1961e8e9f7" exitCode=0 Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.440557 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e8488eab-54d1-405d-aa15-6f7f9a50b6a8","Type":"ContainerDied","Data":"7fbb1c5aef134e7040b0b7f309f2f23106e23f200a2d5fe27734ee1961e8e9f7"} Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.517117 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6bc2bf6-037a-4415-9e9a-fdae0ef54662" path="/var/lib/kubelet/pods/c6bc2bf6-037a-4415-9e9a-fdae0ef54662/volumes" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.826657 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.833259 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952148 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76dch\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-kube-api-access-76dch\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952281 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-erlang-cookie\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952371 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-plugins\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952403 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-confd\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952728 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-erlang-cookie-secret\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952809 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-server-conf\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952830 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-config-data\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952874 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952935 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-tls\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.952989 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-plugins-conf\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: 
\"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.953019 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-pod-info\") pod \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\" (UID: \"e8488eab-54d1-405d-aa15-6f7f9a50b6a8\") " Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.954246 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.954653 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.954792 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.959035 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-kube-api-access-76dch" (OuterVolumeSpecName: "kube-api-access-76dch") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "kube-api-access-76dch". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.959618 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.966206 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-pod-info" (OuterVolumeSpecName: "pod-info") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.966431 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 31 06:05:22 crc kubenswrapper[4712]: I0131 06:05:22.968527 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.000015 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-config-data" (OuterVolumeSpecName: "config-data") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.038852 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-server-conf" (OuterVolumeSpecName: "server-conf") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.058610 4712 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.058644 4712 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-pod-info\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.058654 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76dch\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-kube-api-access-76dch\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.058667 4712 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.058677 4712 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.058685 4712 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.058694 4712 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-server-conf\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.058702 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:23 crc 
kubenswrapper[4712]: I0131 06:05:23.058735 4712 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" "
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.058743 4712 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.083395 4712 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.087829 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e8488eab-54d1-405d-aa15-6f7f9a50b6a8" (UID: "e8488eab-54d1-405d-aa15-6f7f9a50b6a8"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.160505 4712 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.160540 4712 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8488eab-54d1-405d-aa15-6f7f9a50b6a8-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.461105 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e8488eab-54d1-405d-aa15-6f7f9a50b6a8","Type":"ContainerDied","Data":"bd66e456cdd71de02c25ec5db9cb8eadd0c463a8ed9d3b95c47be585cdb60eba"}
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.461259 4712 scope.go:117] "RemoveContainer" containerID="7fbb1c5aef134e7040b0b7f309f2f23106e23f200a2d5fe27734ee1961e8e9f7"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.461418 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.466963 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"57a64e6d-ff8a-480a-aa16-563b5b127e6f","Type":"ContainerStarted","Data":"93d04d6fb6e93cc560302c135b3144d0684cc23b3de6b8b6266e1aeb49c5c65c"}
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.536387 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69fdb9f885-99t9f"]
Jan 31 06:05:23 crc kubenswrapper[4712]: E0131 06:05:23.536994 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" containerName="setup-container"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.537022 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" containerName="setup-container"
Jan 31 06:05:23 crc kubenswrapper[4712]: E0131 06:05:23.537038 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" containerName="rabbitmq"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.537050 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" containerName="rabbitmq"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.541408 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" containerName="rabbitmq"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.547314 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.550204 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69fdb9f885-99t9f"]
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.552151 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.574539 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.577901 4712 scope.go:117] "RemoveContainer" containerID="01388726fb02d90b2eb560005e90cf645b9e2dd51d9efcd0b3815fa84335cf2c"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.596285 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.630635 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.632879 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.635204 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.639312 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.639564 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.639679 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-fq9qc" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.639798 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.643152 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.648400 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.687656 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-config\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.687727 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-nb\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.687838 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-sb\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.687928 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-svc\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.688016 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-openstack-edpm-ipam\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.688159 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-swift-storage-0\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.691068 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.693223 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt5db\" (UniqueName: \"kubernetes.io/projected/135f95c2-bc9d-40c2-adb6-658dbbc37e77-kube-api-access-kt5db\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: E0131 06:05:23.731472 4712 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8488eab_54d1_405d_aa15_6f7f9a50b6a8.slice/crio-bd66e456cdd71de02c25ec5db9cb8eadd0c463a8ed9d3b95c47be585cdb60eba\": RecentStats: unable to find data in memory cache]" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.795712 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-config\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.796121 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-nb\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.796216 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.796322 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-sb\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.796768 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-config\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.796910 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-nb\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 
06:05:23.797150 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-sb\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.797249 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-svc\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.797323 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.797357 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.797497 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.797541 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-openstack-edpm-ipam\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.797569 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.797697 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-swift-storage-0\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798034 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-svc\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798297 4712 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-openstack-edpm-ipam\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798432 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95mhr\" (UniqueName: \"kubernetes.io/projected/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-kube-api-access-95mhr\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798521 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-swift-storage-0\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798590 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt5db\" (UniqueName: \"kubernetes.io/projected/135f95c2-bc9d-40c2-adb6-658dbbc37e77-kube-api-access-kt5db\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798723 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798774 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798819 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798909 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.798935 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.819308 
4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt5db\" (UniqueName: \"kubernetes.io/projected/135f95c2-bc9d-40c2-adb6-658dbbc37e77-kube-api-access-kt5db\") pod \"dnsmasq-dns-69fdb9f885-99t9f\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901219 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901326 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901357 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901383 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901413 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901462 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95mhr\" (UniqueName: \"kubernetes.io/projected/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-kube-api-access-95mhr\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901522 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901553 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901588 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901646 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.901671 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.902910 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.903334 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.903420 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.903707 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.903717 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.904311 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.908556 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " 
pod="openstack/rabbitmq-cell1-server-0"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.908700 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.909276 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.910736 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.926694 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95mhr\" (UniqueName: \"kubernetes.io/projected/dc6391b1-c3f6-4ae8-ad8f-00572ac27b87-kube-api-access-95mhr\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.940898 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.956030 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f"
Jan 31 06:05:23 crc kubenswrapper[4712]: I0131 06:05:23.985453 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 31 06:05:24 crc kubenswrapper[4712]: I0131 06:05:24.490950 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"57a64e6d-ff8a-480a-aa16-563b5b127e6f","Type":"ContainerStarted","Data":"ab9a35efb7291cea64c1e9cca671c704c16d6d658f2c57150d9f0e6f015ece2f"}
Jan 31 06:05:24 crc kubenswrapper[4712]: I0131 06:05:24.522032 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8488eab-54d1-405d-aa15-6f7f9a50b6a8" path="/var/lib/kubelet/pods/e8488eab-54d1-405d-aa15-6f7f9a50b6a8/volumes"
Jan 31 06:05:24 crc kubenswrapper[4712]: I0131 06:05:24.536967 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69fdb9f885-99t9f"]
Jan 31 06:05:24 crc kubenswrapper[4712]: I0131 06:05:24.612451 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 31 06:05:24 crc kubenswrapper[4712]: W0131 06:05:24.614674 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc6391b1_c3f6_4ae8_ad8f_00572ac27b87.slice/crio-3836eebfae46c7d81f820ad2ea402a060da0dbbda6de1233fb9c14ae1207e62e WatchSource:0}: Error finding container 3836eebfae46c7d81f820ad2ea402a060da0dbbda6de1233fb9c14ae1207e62e: Status 404 returned error can't find the container with id 3836eebfae46c7d81f820ad2ea402a060da0dbbda6de1233fb9c14ae1207e62e
Jan 31 06:05:25 crc kubenswrapper[4712]: I0131 06:05:25.504446 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87","Type":"ContainerStarted","Data":"3836eebfae46c7d81f820ad2ea402a060da0dbbda6de1233fb9c14ae1207e62e"}
Jan 31 06:05:25 crc kubenswrapper[4712]: I0131 06:05:25.507700 4712 generic.go:334] "Generic (PLEG): container finished" podID="135f95c2-bc9d-40c2-adb6-658dbbc37e77" containerID="814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc" exitCode=0
Jan 31 06:05:25 crc kubenswrapper[4712]: I0131 06:05:25.507756 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" event={"ID":"135f95c2-bc9d-40c2-adb6-658dbbc37e77","Type":"ContainerDied","Data":"814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc"}
Jan 31 06:05:25 crc kubenswrapper[4712]: I0131 06:05:25.507805 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" event={"ID":"135f95c2-bc9d-40c2-adb6-658dbbc37e77","Type":"ContainerStarted","Data":"e9c329ba7f166b920a003626099a0e0e3fb19396c38712f53d6a94bf4589b8f3"}
Jan 31 06:05:26 crc kubenswrapper[4712]: I0131 06:05:26.520522 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87","Type":"ContainerStarted","Data":"bc56b41fce23321468b158839b121beecd321afb311fa750a7a730ab3ddda090"}
Jan 31 06:05:26 crc kubenswrapper[4712]: I0131 06:05:26.522743 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" event={"ID":"135f95c2-bc9d-40c2-adb6-658dbbc37e77","Type":"ContainerStarted","Data":"28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130"}
Jan 31 06:05:26 crc kubenswrapper[4712]: I0131 06:05:26.522951 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f"
Jan 31 06:05:26 crc kubenswrapper[4712]: I0131 06:05:26.579896 4712 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" podStartSLOduration=3.5798526649999998 podStartE2EDuration="3.579852665s" podCreationTimestamp="2026-01-31 06:05:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:05:26.578097731 +0000 UTC m=+1592.671979582" watchObservedRunningTime="2026-01-31 06:05:26.579852665 +0000 UTC m=+1592.673734506" Jan 31 06:05:33 crc kubenswrapper[4712]: I0131 06:05:33.958292 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.037506 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b9b6b4b89-4tz4m"] Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.037806 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" podUID="51718f5c-e32d-438f-acba-06d5a797b316" containerName="dnsmasq-dns" containerID="cri-o://ee32e834fe986a5f8e2ce9c1b9142d868ea133db1b4431eef9092ca4a9998cf3" gracePeriod=10 Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.153734 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" podUID="51718f5c-e32d-438f-acba-06d5a797b316" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.198:5353: connect: connection refused" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.249353 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-595f5c6cbf-z979m"] Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.268664 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.316433 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-595f5c6cbf-z979m"] Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.452997 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-ovsdbserver-sb\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.453050 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-openstack-edpm-ipam\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.453079 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-dns-svc\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.453119 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdj4j\" (UniqueName: \"kubernetes.io/projected/848e784a-8596-4d55-bb70-f4a99fd14873-kube-api-access-jdj4j\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.453187 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-dns-swift-storage-0\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.453210 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-ovsdbserver-nb\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.453236 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-config\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.555753 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-ovsdbserver-sb\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.555822 4712 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-openstack-edpm-ipam\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.555882 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-dns-svc\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.555935 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdj4j\" (UniqueName: \"kubernetes.io/projected/848e784a-8596-4d55-bb70-f4a99fd14873-kube-api-access-jdj4j\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.556007 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-dns-swift-storage-0\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.556035 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-ovsdbserver-nb\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.556071 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-config\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.556836 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-ovsdbserver-sb\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.557239 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-config\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.558344 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-dns-svc\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.558865 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-ovsdbserver-nb\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.559282 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-dns-swift-storage-0\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.564733 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/848e784a-8596-4d55-bb70-f4a99fd14873-openstack-edpm-ipam\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.581305 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdj4j\" (UniqueName: \"kubernetes.io/projected/848e784a-8596-4d55-bb70-f4a99fd14873-kube-api-access-jdj4j\") pod \"dnsmasq-dns-595f5c6cbf-z979m\" (UID: \"848e784a-8596-4d55-bb70-f4a99fd14873\") " pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.602249 4712 generic.go:334] "Generic (PLEG): container finished" podID="51718f5c-e32d-438f-acba-06d5a797b316" containerID="ee32e834fe986a5f8e2ce9c1b9142d868ea133db1b4431eef9092ca4a9998cf3" exitCode=0 Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.602613 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" event={"ID":"51718f5c-e32d-438f-acba-06d5a797b316","Type":"ContainerDied","Data":"ee32e834fe986a5f8e2ce9c1b9142d868ea133db1b4431eef9092ca4a9998cf3"} Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.602657 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" event={"ID":"51718f5c-e32d-438f-acba-06d5a797b316","Type":"ContainerDied","Data":"017c7bea2ad072f7d7ee8e07b07f384cf1e9a731c83ab19c1c526f62c62422e7"} Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.602673 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="017c7bea2ad072f7d7ee8e07b07f384cf1e9a731c83ab19c1c526f62c62422e7" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.605911 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.759570 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.861696 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-config\") pod \"51718f5c-e32d-438f-acba-06d5a797b316\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.862092 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-sb\") pod \"51718f5c-e32d-438f-acba-06d5a797b316\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.862251 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-svc\") pod \"51718f5c-e32d-438f-acba-06d5a797b316\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.862288 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbfc4\" (UniqueName: \"kubernetes.io/projected/51718f5c-e32d-438f-acba-06d5a797b316-kube-api-access-jbfc4\") pod \"51718f5c-e32d-438f-acba-06d5a797b316\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.862364 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-swift-storage-0\") pod \"51718f5c-e32d-438f-acba-06d5a797b316\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.862541 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-nb\") pod \"51718f5c-e32d-438f-acba-06d5a797b316\" (UID: \"51718f5c-e32d-438f-acba-06d5a797b316\") " Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.869428 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51718f5c-e32d-438f-acba-06d5a797b316-kube-api-access-jbfc4" (OuterVolumeSpecName: "kube-api-access-jbfc4") pod "51718f5c-e32d-438f-acba-06d5a797b316" (UID: "51718f5c-e32d-438f-acba-06d5a797b316"). InnerVolumeSpecName "kube-api-access-jbfc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.935822 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-595f5c6cbf-z979m"] Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.943933 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "51718f5c-e32d-438f-acba-06d5a797b316" (UID: "51718f5c-e32d-438f-acba-06d5a797b316"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.947715 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "51718f5c-e32d-438f-acba-06d5a797b316" (UID: "51718f5c-e32d-438f-acba-06d5a797b316"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.949130 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-config" (OuterVolumeSpecName: "config") pod "51718f5c-e32d-438f-acba-06d5a797b316" (UID: "51718f5c-e32d-438f-acba-06d5a797b316"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.965051 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.965089 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.965101 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.965109 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbfc4\" (UniqueName: \"kubernetes.io/projected/51718f5c-e32d-438f-acba-06d5a797b316-kube-api-access-jbfc4\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.974076 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "51718f5c-e32d-438f-acba-06d5a797b316" (UID: "51718f5c-e32d-438f-acba-06d5a797b316"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:34 crc kubenswrapper[4712]: I0131 06:05:34.975482 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "51718f5c-e32d-438f-acba-06d5a797b316" (UID: "51718f5c-e32d-438f-acba-06d5a797b316"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:35 crc kubenswrapper[4712]: I0131 06:05:35.066849 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:35 crc kubenswrapper[4712]: I0131 06:05:35.066881 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51718f5c-e32d-438f-acba-06d5a797b316-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:35 crc kubenswrapper[4712]: I0131 06:05:35.613764 4712 generic.go:334] "Generic (PLEG): container finished" podID="848e784a-8596-4d55-bb70-f4a99fd14873" containerID="5601607b0bb5d34240244bec9a9cf62e0fb9d541bbd085eca0d905ff893454f7" exitCode=0 Jan 31 06:05:35 crc kubenswrapper[4712]: I0131 06:05:35.613860 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" event={"ID":"848e784a-8596-4d55-bb70-f4a99fd14873","Type":"ContainerDied","Data":"5601607b0bb5d34240244bec9a9cf62e0fb9d541bbd085eca0d905ff893454f7"} Jan 31 06:05:35 crc kubenswrapper[4712]: I0131 06:05:35.614274 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9b6b4b89-4tz4m" Jan 31 06:05:35 crc kubenswrapper[4712]: I0131 06:05:35.614320 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" event={"ID":"848e784a-8596-4d55-bb70-f4a99fd14873","Type":"ContainerStarted","Data":"6482f0298501495d2a92b80a48c1322621e9e44c1f61da4dde4187805ae554bc"} Jan 31 06:05:35 crc kubenswrapper[4712]: I0131 06:05:35.820378 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b9b6b4b89-4tz4m"] Jan 31 06:05:35 crc kubenswrapper[4712]: I0131 06:05:35.829912 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b9b6b4b89-4tz4m"] Jan 31 06:05:36 crc kubenswrapper[4712]: I0131 06:05:36.515426 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51718f5c-e32d-438f-acba-06d5a797b316" path="/var/lib/kubelet/pods/51718f5c-e32d-438f-acba-06d5a797b316/volumes" Jan 31 06:05:36 crc kubenswrapper[4712]: I0131 06:05:36.624381 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" event={"ID":"848e784a-8596-4d55-bb70-f4a99fd14873","Type":"ContainerStarted","Data":"36a0a28e3a3c7ced9682ef02b5882b09216d6b1a468b4f1fa4a3704a6ce1b5a7"} Jan 31 06:05:36 crc kubenswrapper[4712]: I0131 06:05:36.624544 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:36 crc kubenswrapper[4712]: I0131 06:05:36.651572 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" podStartSLOduration=2.651546289 podStartE2EDuration="2.651546289s" podCreationTimestamp="2026-01-31 06:05:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:05:36.640590029 +0000 UTC m=+1602.734471870" watchObservedRunningTime="2026-01-31 06:05:36.651546289 +0000 UTC m=+1602.745428130" Jan 31 06:05:44 crc kubenswrapper[4712]: I0131 06:05:44.607829 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-595f5c6cbf-z979m" Jan 31 06:05:44 crc kubenswrapper[4712]: I0131 06:05:44.700746 4712 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69fdb9f885-99t9f"] Jan 31 06:05:44 crc kubenswrapper[4712]: I0131 06:05:44.701215 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" podUID="135f95c2-bc9d-40c2-adb6-658dbbc37e77" containerName="dnsmasq-dns" containerID="cri-o://28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130" gracePeriod=10 Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.302123 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.328812 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-svc\") pod \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.328919 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kt5db\" (UniqueName: \"kubernetes.io/projected/135f95c2-bc9d-40c2-adb6-658dbbc37e77-kube-api-access-kt5db\") pod \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.328975 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-openstack-edpm-ipam\") pod \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.328993 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-sb\") pod \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.329012 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-config\") pod \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.329088 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-swift-storage-0\") pod \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.329138 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-nb\") pod \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\" (UID: \"135f95c2-bc9d-40c2-adb6-658dbbc37e77\") " Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.335568 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/135f95c2-bc9d-40c2-adb6-658dbbc37e77-kube-api-access-kt5db" (OuterVolumeSpecName: "kube-api-access-kt5db") pod "135f95c2-bc9d-40c2-adb6-658dbbc37e77" (UID: "135f95c2-bc9d-40c2-adb6-658dbbc37e77"). InnerVolumeSpecName "kube-api-access-kt5db". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.436075 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kt5db\" (UniqueName: \"kubernetes.io/projected/135f95c2-bc9d-40c2-adb6-658dbbc37e77-kube-api-access-kt5db\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.485441 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "135f95c2-bc9d-40c2-adb6-658dbbc37e77" (UID: "135f95c2-bc9d-40c2-adb6-658dbbc37e77"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.502881 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "135f95c2-bc9d-40c2-adb6-658dbbc37e77" (UID: "135f95c2-bc9d-40c2-adb6-658dbbc37e77"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.518833 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "135f95c2-bc9d-40c2-adb6-658dbbc37e77" (UID: "135f95c2-bc9d-40c2-adb6-658dbbc37e77"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.522970 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-config" (OuterVolumeSpecName: "config") pod "135f95c2-bc9d-40c2-adb6-658dbbc37e77" (UID: "135f95c2-bc9d-40c2-adb6-658dbbc37e77"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.529920 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "135f95c2-bc9d-40c2-adb6-658dbbc37e77" (UID: "135f95c2-bc9d-40c2-adb6-658dbbc37e77"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.537709 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "135f95c2-bc9d-40c2-adb6-658dbbc37e77" (UID: "135f95c2-bc9d-40c2-adb6-658dbbc37e77"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.553916 4712 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.553963 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.553973 4712 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.553982 4712 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.553990 4712 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.553999 4712 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/135f95c2-bc9d-40c2-adb6-658dbbc37e77-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.756420 4712 generic.go:334] "Generic (PLEG): container finished" podID="135f95c2-bc9d-40c2-adb6-658dbbc37e77" containerID="28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130" exitCode=0 Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.756480 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" event={"ID":"135f95c2-bc9d-40c2-adb6-658dbbc37e77","Type":"ContainerDied","Data":"28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130"} Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.756512 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.756534 4712 scope.go:117] "RemoveContainer" containerID="28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.756518 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69fdb9f885-99t9f" event={"ID":"135f95c2-bc9d-40c2-adb6-658dbbc37e77","Type":"ContainerDied","Data":"e9c329ba7f166b920a003626099a0e0e3fb19396c38712f53d6a94bf4589b8f3"} Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.778069 4712 scope.go:117] "RemoveContainer" containerID="814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.797254 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69fdb9f885-99t9f"] Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.806311 4712 scope.go:117] "RemoveContainer" containerID="28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130" Jan 31 06:05:45 crc kubenswrapper[4712]: E0131 06:05:45.806773 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130\": container with ID starting with 28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130 not found: ID does not exist" containerID="28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.806809 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130"} err="failed to get container status \"28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130\": rpc error: code = NotFound desc = could not find container \"28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130\": container with ID starting with 28191cc407043321a07f724db2aab23e8f8ce31074885476c1c9616d729cf130 not found: ID does not exist" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.806837 4712 scope.go:117] "RemoveContainer" containerID="814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc" Jan 31 06:05:45 crc kubenswrapper[4712]: E0131 06:05:45.807049 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc\": container with ID starting with 814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc not found: ID does not exist" containerID="814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.807071 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc"} err="failed to get container status \"814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc\": rpc error: code = NotFound desc = could not find container \"814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc\": container with ID starting with 814ca06dd0ad552c43c4d73d867f2ef78ad5d4811d61f72ce881c3b1b35c4edc not found: ID does not exist" Jan 31 06:05:45 crc kubenswrapper[4712]: I0131 06:05:45.807700 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69fdb9f885-99t9f"] Jan 31 
Jan 31 06:05:46 crc kubenswrapper[4712]: I0131 06:05:46.515132 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="135f95c2-bc9d-40c2-adb6-658dbbc37e77" path="/var/lib/kubelet/pods/135f95c2-bc9d-40c2-adb6-658dbbc37e77/volumes"
Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.880749 4712 generic.go:334] "Generic (PLEG): container finished" podID="57a64e6d-ff8a-480a-aa16-563b5b127e6f" containerID="ab9a35efb7291cea64c1e9cca671c704c16d6d658f2c57150d9f0e6f015ece2f" exitCode=0
Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.880817 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"57a64e6d-ff8a-480a-aa16-563b5b127e6f","Type":"ContainerDied","Data":"ab9a35efb7291cea64c1e9cca671c704c16d6d658f2c57150d9f0e6f015ece2f"}
Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.918883 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl"]
Jan 31 06:05:56 crc kubenswrapper[4712]: E0131 06:05:56.919397 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51718f5c-e32d-438f-acba-06d5a797b316" containerName="init"
Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.919414 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="51718f5c-e32d-438f-acba-06d5a797b316" containerName="init"
Jan 31 06:05:56 crc kubenswrapper[4712]: E0131 06:05:56.919444 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="135f95c2-bc9d-40c2-adb6-658dbbc37e77" containerName="dnsmasq-dns"
Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.919452 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="135f95c2-bc9d-40c2-adb6-658dbbc37e77" containerName="dnsmasq-dns"
Jan 31 06:05:56 crc kubenswrapper[4712]: E0131 06:05:56.919470 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51718f5c-e32d-438f-acba-06d5a797b316" containerName="dnsmasq-dns"
Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.919479 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="51718f5c-e32d-438f-acba-06d5a797b316" containerName="dnsmasq-dns"
Jan 31 06:05:56 crc kubenswrapper[4712]: E0131 06:05:56.919488 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="135f95c2-bc9d-40c2-adb6-658dbbc37e77" containerName="init"
Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.919496 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="135f95c2-bc9d-40c2-adb6-658dbbc37e77" containerName="init"
Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.919711 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="51718f5c-e32d-438f-acba-06d5a797b316" containerName="dnsmasq-dns"
Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.919742 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="135f95c2-bc9d-40c2-adb6-658dbbc37e77" containerName="dnsmasq-dns"
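The cpu_manager/memory_manager entries above run just before the new repo-setup pod is admitted: assignments still checkpointed for the two deleted dnsmasq pods are dropped so the resource managers' books match the live pod set. A minimal sketch of that cleanup shape, assuming in-memory state keyed by pod UID and container name (illustrative types, not the kubelet's actual state package):

package main

import "fmt"

// assignments: podUID -> containerName -> assigned CPU set (string here).
type assignments map[string]map[string]string

// removeStaleState drops every assignment whose pod is no longer active,
// mirroring the "RemoveStaleState: removing container" entries above.
func removeStaleState(st assignments, activePods map[string]bool) {
	for podUID, containers := range st {
		if activePods[podUID] {
			continue
		}
		for name := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
			delete(containers, name)
		}
		delete(st, podUID)
	}
}

func main() {
	st := assignments{"135f95c2-bc9d-40c2-adb6-658dbbc37e77": {"init": "0-1", "dnsmasq-dns": "2-3"}}
	removeStaleState(st, map[string]bool{}) // no active pods -> both entries removed
}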
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.924950 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.925278 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.925447 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.925954 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:05:56 crc kubenswrapper[4712]: I0131 06:05:56.948324 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl"] Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.015027 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.015131 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.015208 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tpw8\" (UniqueName: \"kubernetes.io/projected/b7519335-f8e2-4211-8b99-a9fc3ac51150-kube-api-access-8tpw8\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.015522 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.118062 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.118140 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tpw8\" (UniqueName: 
\"kubernetes.io/projected/b7519335-f8e2-4211-8b99-a9fc3ac51150-kube-api-access-8tpw8\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.118237 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.118351 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.124013 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.124037 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.124228 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.136109 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tpw8\" (UniqueName: \"kubernetes.io/projected/b7519335-f8e2-4211-8b99-a9fc3ac51150-kube-api-access-8tpw8\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:05:57 crc kubenswrapper[4712]: I0131 06:05:57.398385 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:57.912465 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"57a64e6d-ff8a-480a-aa16-563b5b127e6f","Type":"ContainerStarted","Data":"ee83ade7c7c33a61c4c70ee50a2e43eb8bc2c9ae3b3f02d49a4886b0cbdadc9c"} Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:57.914111 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:57.953522 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.953496404 podStartE2EDuration="36.953496404s" podCreationTimestamp="2026-01-31 06:05:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:05:57.941853005 +0000 UTC m=+1624.035734846" watchObservedRunningTime="2026-01-31 06:05:57.953496404 +0000 UTC m=+1624.047378245" Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:57.989714 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl"] Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:58.925125 4712 generic.go:334] "Generic (PLEG): container finished" podID="dc6391b1-c3f6-4ae8-ad8f-00572ac27b87" containerID="bc56b41fce23321468b158839b121beecd321afb311fa750a7a730ab3ddda090" exitCode=0 Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:58.925231 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87","Type":"ContainerDied","Data":"bc56b41fce23321468b158839b121beecd321afb311fa750a7a730ab3ddda090"} Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:58.927746 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" event={"ID":"b7519335-f8e2-4211-8b99-a9fc3ac51150","Type":"ContainerStarted","Data":"2eec6f73fc21c882b0d20b32b6c2a23bbe3857e6713fe3c2b0675af7c501237a"} Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:59.945864 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dc6391b1-c3f6-4ae8-ad8f-00572ac27b87","Type":"ContainerStarted","Data":"15b06d3f876fb1346ad308feb50be0a9bc0eb795fc06e0823d3f141e95357341"} Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:59.946799 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:06:00 crc kubenswrapper[4712]: I0131 06:05:59.983301 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.983276107 podStartE2EDuration="36.983276107s" podCreationTimestamp="2026-01-31 06:05:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:05:59.974292265 +0000 UTC m=+1626.068174116" watchObservedRunningTime="2026-01-31 06:05:59.983276107 +0000 UTC m=+1626.077157958" Jan 31 06:06:11 crc kubenswrapper[4712]: I0131 06:06:11.979570 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="57a64e6d-ff8a-480a-aa16-563b5b127e6f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.205:5671: 
connect: connection refused" Jan 31 06:06:13 crc kubenswrapper[4712]: I0131 06:06:13.987683 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="dc6391b1-c3f6-4ae8-ad8f-00572ac27b87" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.207:5671: connect: connection refused" Jan 31 06:06:14 crc kubenswrapper[4712]: E0131 06:06:14.849327 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-ansibleee-runner@sha256:9d2f107ddcf79172d7d1c8409c51134b63baf7f3a4c98a5d48cd8a3ef4007d02" Jan 31 06:06:14 crc kubenswrapper[4712]: E0131 06:06:14.849531 4712 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 31 06:06:14 crc kubenswrapper[4712]: container &Container{Name:repo-setup-edpm-deployment-openstack-edpm-ipam,Image:quay.io/openstack-k8s-operators/openstack-ansibleee-runner@sha256:9d2f107ddcf79172d7d1c8409c51134b63baf7f3a4c98a5d48cd8a3ef4007d02,Command:[],Args:[ansible-runner run /runner -p playbook.yaml -i repo-setup-edpm-deployment-openstack-edpm-ipam],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ANSIBLE_VERBOSITY,Value:2,ValueFrom:nil,},EnvVar{Name:RUNNER_PLAYBOOK,Value: Jan 31 06:06:14 crc kubenswrapper[4712]: - hosts: all Jan 31 06:06:14 crc kubenswrapper[4712]: strategy: linear Jan 31 06:06:14 crc kubenswrapper[4712]: tasks: Jan 31 06:06:14 crc kubenswrapper[4712]: - name: Enable podified-repos Jan 31 06:06:14 crc kubenswrapper[4712]: become: true Jan 31 06:06:14 crc kubenswrapper[4712]: ansible.builtin.shell: | Jan 31 06:06:14 crc kubenswrapper[4712]: set -euxo pipefail Jan 31 06:06:14 crc kubenswrapper[4712]: pushd /var/tmp Jan 31 06:06:14 crc kubenswrapper[4712]: curl -sL https://github.com/openstack-k8s-operators/repo-setup/archive/refs/heads/main.tar.gz | tar -xz Jan 31 06:06:14 crc kubenswrapper[4712]: pushd repo-setup-main Jan 31 06:06:14 crc kubenswrapper[4712]: python3 -m venv ./venv Jan 31 06:06:14 crc kubenswrapper[4712]: PBR_VERSION=0.0.0 ./venv/bin/pip install ./ Jan 31 06:06:14 crc kubenswrapper[4712]: ./venv/bin/repo-setup current-podified -b antelope Jan 31 06:06:14 crc kubenswrapper[4712]: popd Jan 31 06:06:14 crc kubenswrapper[4712]: rm -rf repo-setup-main Jan 31 06:06:14 crc kubenswrapper[4712]: Jan 31 06:06:14 crc kubenswrapper[4712]: Jan 31 06:06:14 crc kubenswrapper[4712]: ,ValueFrom:nil,},EnvVar{Name:RUNNER_EXTRA_VARS,Value: Jan 31 06:06:14 crc kubenswrapper[4712]: edpm_override_hosts: openstack-edpm-ipam Jan 31 06:06:14 crc kubenswrapper[4712]: edpm_service_type: repo-setup Jan 31 06:06:14 crc kubenswrapper[4712]: Jan 31 06:06:14 crc kubenswrapper[4712]: Jan 31 06:06:14 crc kubenswrapper[4712]: 
,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:repo-setup-combined-ca-bundle,ReadOnly:false,MountPath:/var/lib/openstack/cacerts/repo-setup,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key-openstack-edpm-ipam,ReadOnly:false,MountPath:/runner/env/ssh_key/ssh_key_openstack-edpm-ipam,SubPath:ssh_key_openstack-edpm-ipam,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:inventory,ReadOnly:false,MountPath:/runner/inventory/hosts,SubPath:inventory,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8tpw8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:openstack-aee-default-env,},Optional:*true,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl_openstack(b7519335-f8e2-4211-8b99-a9fc3ac51150): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled Jan 31 06:06:14 crc kubenswrapper[4712]: > logger="UnhandledError" Jan 31 06:06:14 crc kubenswrapper[4712]: E0131 06:06:14.850878 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"repo-setup-edpm-deployment-openstack-edpm-ipam\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" podUID="b7519335-f8e2-4211-8b99-a9fc3ac51150" Jan 31 06:06:15 crc kubenswrapper[4712]: E0131 06:06:15.108265 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"repo-setup-edpm-deployment-openstack-edpm-ipam\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-ansibleee-runner@sha256:9d2f107ddcf79172d7d1c8409c51134b63baf7f3a4c98a5d48cd8a3ef4007d02\\\"\"" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" podUID="b7519335-f8e2-4211-8b99-a9fc3ac51150" Jan 31 06:06:16 crc kubenswrapper[4712]: I0131 06:06:16.164931 4712 scope.go:117] "RemoveContainer" containerID="8fd86bc66e5b9d680d2db6f26ab4ec04eb24a9522cfe0d2e09698094fc1a9118" Jan 31 06:06:16 crc kubenswrapper[4712]: I0131 06:06:16.204373 4712 scope.go:117] "RemoveContainer" containerID="4f6934af4111a29ecd434319d69bb3c8c1dbdff0c730c950b5d033cca184deff" Jan 31 06:06:16 crc kubenswrapper[4712]: I0131 06:06:16.245429 4712 scope.go:117] "RemoveContainer" containerID="15e1ec8fd5f53f590e86a9d72b74186a3bfd6ef9a4cbf179b935574ef1602022" Jan 31 06:06:21 crc kubenswrapper[4712]: I0131 06:06:21.979820 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/rabbitmq-server-0" Jan 31 06:06:23 crc kubenswrapper[4712]: I0131 06:06:23.988428 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 31 06:06:28 crc kubenswrapper[4712]: I0131 06:06:28.242188 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" event={"ID":"b7519335-f8e2-4211-8b99-a9fc3ac51150","Type":"ContainerStarted","Data":"62c233aba05db39fcf603552934e355a9feda0e6385743077cfda6489fea8d3b"} Jan 31 06:06:28 crc kubenswrapper[4712]: I0131 06:06:28.262923 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" podStartSLOduration=3.110853035 podStartE2EDuration="32.262898908s" podCreationTimestamp="2026-01-31 06:05:56 +0000 UTC" firstStartedPulling="2026-01-31 06:05:57.993905624 +0000 UTC m=+1624.087787465" lastFinishedPulling="2026-01-31 06:06:27.145951497 +0000 UTC m=+1653.239833338" observedRunningTime="2026-01-31 06:06:28.261807101 +0000 UTC m=+1654.355688972" watchObservedRunningTime="2026-01-31 06:06:28.262898908 +0000 UTC m=+1654.356780759" Jan 31 06:06:42 crc kubenswrapper[4712]: I0131 06:06:42.381822 4712 generic.go:334] "Generic (PLEG): container finished" podID="b7519335-f8e2-4211-8b99-a9fc3ac51150" containerID="62c233aba05db39fcf603552934e355a9feda0e6385743077cfda6489fea8d3b" exitCode=0 Jan 31 06:06:42 crc kubenswrapper[4712]: I0131 06:06:42.382302 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" event={"ID":"b7519335-f8e2-4211-8b99-a9fc3ac51150","Type":"ContainerDied","Data":"62c233aba05db39fcf603552934e355a9feda0e6385743077cfda6489fea8d3b"} Jan 31 06:06:42 crc kubenswrapper[4712]: I0131 06:06:42.499378 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:06:42 crc kubenswrapper[4712]: I0131 06:06:42.499684 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:06:43 crc kubenswrapper[4712]: I0131 06:06:43.871579 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:06:43 crc kubenswrapper[4712]: I0131 06:06:43.908834 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-inventory\") pod \"b7519335-f8e2-4211-8b99-a9fc3ac51150\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " Jan 31 06:06:43 crc kubenswrapper[4712]: I0131 06:06:43.910368 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tpw8\" (UniqueName: \"kubernetes.io/projected/b7519335-f8e2-4211-8b99-a9fc3ac51150-kube-api-access-8tpw8\") pod \"b7519335-f8e2-4211-8b99-a9fc3ac51150\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " Jan 31 06:06:43 crc kubenswrapper[4712]: I0131 06:06:43.910546 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-ssh-key-openstack-edpm-ipam\") pod \"b7519335-f8e2-4211-8b99-a9fc3ac51150\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " Jan 31 06:06:43 crc kubenswrapper[4712]: I0131 06:06:43.910642 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-repo-setup-combined-ca-bundle\") pod \"b7519335-f8e2-4211-8b99-a9fc3ac51150\" (UID: \"b7519335-f8e2-4211-8b99-a9fc3ac51150\") " Jan 31 06:06:43 crc kubenswrapper[4712]: I0131 06:06:43.918959 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "b7519335-f8e2-4211-8b99-a9fc3ac51150" (UID: "b7519335-f8e2-4211-8b99-a9fc3ac51150"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:06:43 crc kubenswrapper[4712]: I0131 06:06:43.919102 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7519335-f8e2-4211-8b99-a9fc3ac51150-kube-api-access-8tpw8" (OuterVolumeSpecName: "kube-api-access-8tpw8") pod "b7519335-f8e2-4211-8b99-a9fc3ac51150" (UID: "b7519335-f8e2-4211-8b99-a9fc3ac51150"). InnerVolumeSpecName "kube-api-access-8tpw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:06:43 crc kubenswrapper[4712]: I0131 06:06:43.941940 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-inventory" (OuterVolumeSpecName: "inventory") pod "b7519335-f8e2-4211-8b99-a9fc3ac51150" (UID: "b7519335-f8e2-4211-8b99-a9fc3ac51150"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:06:43 crc kubenswrapper[4712]: I0131 06:06:43.949675 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "b7519335-f8e2-4211-8b99-a9fc3ac51150" (UID: "b7519335-f8e2-4211-8b99-a9fc3ac51150"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.014191 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tpw8\" (UniqueName: \"kubernetes.io/projected/b7519335-f8e2-4211-8b99-a9fc3ac51150-kube-api-access-8tpw8\") on node \"crc\" DevicePath \"\"" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.014233 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.014247 4712 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.014259 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7519335-f8e2-4211-8b99-a9fc3ac51150-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.406572 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" event={"ID":"b7519335-f8e2-4211-8b99-a9fc3ac51150","Type":"ContainerDied","Data":"2eec6f73fc21c882b0d20b32b6c2a23bbe3857e6713fe3c2b0675af7c501237a"} Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.407152 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2eec6f73fc21c882b0d20b32b6c2a23bbe3857e6713fe3c2b0675af7c501237a" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.406688 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.519421 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8"] Jan 31 06:06:44 crc kubenswrapper[4712]: E0131 06:06:44.520071 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7519335-f8e2-4211-8b99-a9fc3ac51150" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.520160 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7519335-f8e2-4211-8b99-a9fc3ac51150" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.520577 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7519335-f8e2-4211-8b99-a9fc3ac51150" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.521582 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.526970 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.528106 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.528536 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.528959 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.534539 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8"] Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.629516 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk9hf\" (UniqueName: \"kubernetes.io/projected/76c5b162-1ced-457b-90f1-fbf85edf746d-kube-api-access-mk9hf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-drvv8\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.629638 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-drvv8\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.629701 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-drvv8\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.731815 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk9hf\" (UniqueName: \"kubernetes.io/projected/76c5b162-1ced-457b-90f1-fbf85edf746d-kube-api-access-mk9hf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-drvv8\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.731889 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-drvv8\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.731932 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-ssh-key-openstack-edpm-ipam\") pod 
\"redhat-edpm-deployment-openstack-edpm-ipam-drvv8\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.737668 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-drvv8\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.739618 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-drvv8\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.752648 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk9hf\" (UniqueName: \"kubernetes.io/projected/76c5b162-1ced-457b-90f1-fbf85edf746d-kube-api-access-mk9hf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-drvv8\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:44 crc kubenswrapper[4712]: I0131 06:06:44.846949 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:45 crc kubenswrapper[4712]: I0131 06:06:45.446612 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8"] Jan 31 06:06:46 crc kubenswrapper[4712]: I0131 06:06:46.434755 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" event={"ID":"76c5b162-1ced-457b-90f1-fbf85edf746d","Type":"ContainerStarted","Data":"bec652a806d28eeb495ca910adac04ad49de2ecd8bab6d31cf07a11454142319"} Jan 31 06:06:46 crc kubenswrapper[4712]: I0131 06:06:46.435590 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" event={"ID":"76c5b162-1ced-457b-90f1-fbf85edf746d","Type":"ContainerStarted","Data":"e66b95d9e3dd40da75bacf6179c868aef147237e7ab946ade63e4c7924789466"} Jan 31 06:06:46 crc kubenswrapper[4712]: I0131 06:06:46.458092 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" podStartSLOduration=1.982833624 podStartE2EDuration="2.458064337s" podCreationTimestamp="2026-01-31 06:06:44 +0000 UTC" firstStartedPulling="2026-01-31 06:06:45.452301691 +0000 UTC m=+1671.546183522" lastFinishedPulling="2026-01-31 06:06:45.927532394 +0000 UTC m=+1672.021414235" observedRunningTime="2026-01-31 06:06:46.451008532 +0000 UTC m=+1672.544890383" watchObservedRunningTime="2026-01-31 06:06:46.458064337 +0000 UTC m=+1672.551946178" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.287803 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pv4rt"] Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.291552 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.302848 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pv4rt"] Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.331730 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49hp4\" (UniqueName: \"kubernetes.io/projected/ed6fce71-b862-4139-98a4-c067162724b4-kube-api-access-49hp4\") pod \"redhat-marketplace-pv4rt\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.331919 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-utilities\") pod \"redhat-marketplace-pv4rt\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.331992 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-catalog-content\") pod \"redhat-marketplace-pv4rt\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.433870 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-utilities\") pod \"redhat-marketplace-pv4rt\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.433968 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-catalog-content\") pod \"redhat-marketplace-pv4rt\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.434049 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49hp4\" (UniqueName: \"kubernetes.io/projected/ed6fce71-b862-4139-98a4-c067162724b4-kube-api-access-49hp4\") pod \"redhat-marketplace-pv4rt\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.434702 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-utilities\") pod \"redhat-marketplace-pv4rt\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.434736 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-catalog-content\") pod \"redhat-marketplace-pv4rt\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.464788 4712 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-49hp4\" (UniqueName: \"kubernetes.io/projected/ed6fce71-b862-4139-98a4-c067162724b4-kube-api-access-49hp4\") pod \"redhat-marketplace-pv4rt\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.468667 4712 generic.go:334] "Generic (PLEG): container finished" podID="76c5b162-1ced-457b-90f1-fbf85edf746d" containerID="bec652a806d28eeb495ca910adac04ad49de2ecd8bab6d31cf07a11454142319" exitCode=0 Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.468709 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" event={"ID":"76c5b162-1ced-457b-90f1-fbf85edf746d","Type":"ContainerDied","Data":"bec652a806d28eeb495ca910adac04ad49de2ecd8bab6d31cf07a11454142319"} Jan 31 06:06:49 crc kubenswrapper[4712]: I0131 06:06:49.614306 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.112809 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pv4rt"] Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.483527 4712 generic.go:334] "Generic (PLEG): container finished" podID="ed6fce71-b862-4139-98a4-c067162724b4" containerID="c26774f696afd8c743ab12bf76f0b4287a8cfadb94b3ff37c58cb1941e25894f" exitCode=0 Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.484737 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv4rt" event={"ID":"ed6fce71-b862-4139-98a4-c067162724b4","Type":"ContainerDied","Data":"c26774f696afd8c743ab12bf76f0b4287a8cfadb94b3ff37c58cb1941e25894f"} Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.484769 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv4rt" event={"ID":"ed6fce71-b862-4139-98a4-c067162724b4","Type":"ContainerStarted","Data":"ad7951c2c4f9fb97b388fa172df75030a411d29997694cc7364ffa3fa8438736"} Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.862055 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.868596 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mk9hf\" (UniqueName: \"kubernetes.io/projected/76c5b162-1ced-457b-90f1-fbf85edf746d-kube-api-access-mk9hf\") pod \"76c5b162-1ced-457b-90f1-fbf85edf746d\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.868861 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-ssh-key-openstack-edpm-ipam\") pod \"76c5b162-1ced-457b-90f1-fbf85edf746d\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.868932 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-inventory\") pod \"76c5b162-1ced-457b-90f1-fbf85edf746d\" (UID: \"76c5b162-1ced-457b-90f1-fbf85edf746d\") " Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.877459 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76c5b162-1ced-457b-90f1-fbf85edf746d-kube-api-access-mk9hf" (OuterVolumeSpecName: "kube-api-access-mk9hf") pod "76c5b162-1ced-457b-90f1-fbf85edf746d" (UID: "76c5b162-1ced-457b-90f1-fbf85edf746d"). InnerVolumeSpecName "kube-api-access-mk9hf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.905897 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-inventory" (OuterVolumeSpecName: "inventory") pod "76c5b162-1ced-457b-90f1-fbf85edf746d" (UID: "76c5b162-1ced-457b-90f1-fbf85edf746d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.916163 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "76c5b162-1ced-457b-90f1-fbf85edf746d" (UID: "76c5b162-1ced-457b-90f1-fbf85edf746d"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.972582 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mk9hf\" (UniqueName: \"kubernetes.io/projected/76c5b162-1ced-457b-90f1-fbf85edf746d-kube-api-access-mk9hf\") on node \"crc\" DevicePath \"\"" Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.972616 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:06:50 crc kubenswrapper[4712]: I0131 06:06:50.972626 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76c5b162-1ced-457b-90f1-fbf85edf746d-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.495021 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" event={"ID":"76c5b162-1ced-457b-90f1-fbf85edf746d","Type":"ContainerDied","Data":"e66b95d9e3dd40da75bacf6179c868aef147237e7ab946ade63e4c7924789466"} Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.496322 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e66b95d9e3dd40da75bacf6179c868aef147237e7ab946ade63e4c7924789466" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.495127 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-drvv8" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.591087 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662"] Jan 31 06:06:51 crc kubenswrapper[4712]: E0131 06:06:51.591716 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76c5b162-1ced-457b-90f1-fbf85edf746d" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.591743 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="76c5b162-1ced-457b-90f1-fbf85edf746d" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.592004 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="76c5b162-1ced-457b-90f1-fbf85edf746d" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.592943 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.596040 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.596323 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.597113 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.597543 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.605201 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662"] Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.689294 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.689376 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxwph\" (UniqueName: \"kubernetes.io/projected/d61c9236-0514-4d46-b7a7-49f8d5e63685-kube-api-access-xxwph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.689397 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.689437 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.792022 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.792519 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxwph\" (UniqueName: 
\"kubernetes.io/projected/d61c9236-0514-4d46-b7a7-49f8d5e63685-kube-api-access-xxwph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.792633 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.792767 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.797828 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.797824 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.798597 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.809230 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxwph\" (UniqueName: \"kubernetes.io/projected/d61c9236-0514-4d46-b7a7-49f8d5e63685-kube-api-access-xxwph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d9662\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:51 crc kubenswrapper[4712]: I0131 06:06:51.918563 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:06:52 crc kubenswrapper[4712]: I0131 06:06:52.475597 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662"] Jan 31 06:06:52 crc kubenswrapper[4712]: W0131 06:06:52.479249 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd61c9236_0514_4d46_b7a7_49f8d5e63685.slice/crio-cc0b0b1dc86a43b6d82ef1071cbe4fd49eef37439acb1d8f26a81c1d569c2ba8 WatchSource:0}: Error finding container cc0b0b1dc86a43b6d82ef1071cbe4fd49eef37439acb1d8f26a81c1d569c2ba8: Status 404 returned error can't find the container with id cc0b0b1dc86a43b6d82ef1071cbe4fd49eef37439acb1d8f26a81c1d569c2ba8 Jan 31 06:06:52 crc kubenswrapper[4712]: I0131 06:06:52.509518 4712 generic.go:334] "Generic (PLEG): container finished" podID="ed6fce71-b862-4139-98a4-c067162724b4" containerID="e55547bedefbb295810232610dc5280435ff671522b786c8ec6dfaa503cd0def" exitCode=0 Jan 31 06:06:52 crc kubenswrapper[4712]: I0131 06:06:52.516320 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv4rt" event={"ID":"ed6fce71-b862-4139-98a4-c067162724b4","Type":"ContainerDied","Data":"e55547bedefbb295810232610dc5280435ff671522b786c8ec6dfaa503cd0def"} Jan 31 06:06:52 crc kubenswrapper[4712]: I0131 06:06:52.516366 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" event={"ID":"d61c9236-0514-4d46-b7a7-49f8d5e63685","Type":"ContainerStarted","Data":"cc0b0b1dc86a43b6d82ef1071cbe4fd49eef37439acb1d8f26a81c1d569c2ba8"} Jan 31 06:06:55 crc kubenswrapper[4712]: I0131 06:06:55.561226 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" event={"ID":"d61c9236-0514-4d46-b7a7-49f8d5e63685","Type":"ContainerStarted","Data":"a69d4a51555c64711113e4e101d6e2c350139700a4ca76cf81107863995a57e5"} Jan 31 06:06:55 crc kubenswrapper[4712]: I0131 06:06:55.569394 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv4rt" event={"ID":"ed6fce71-b862-4139-98a4-c067162724b4","Type":"ContainerStarted","Data":"02aa83e0d7baa9948990e62bd063a0c60cf6a5655df7dd512ebba736fc062fa9"} Jan 31 06:06:55 crc kubenswrapper[4712]: I0131 06:06:55.585856 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" podStartSLOduration=3.086761317 podStartE2EDuration="4.585832293s" podCreationTimestamp="2026-01-31 06:06:51 +0000 UTC" firstStartedPulling="2026-01-31 06:06:52.482524991 +0000 UTC m=+1678.576406832" lastFinishedPulling="2026-01-31 06:06:53.981595957 +0000 UTC m=+1680.075477808" observedRunningTime="2026-01-31 06:06:55.580015752 +0000 UTC m=+1681.673897613" watchObservedRunningTime="2026-01-31 06:06:55.585832293 +0000 UTC m=+1681.679714134" Jan 31 06:06:55 crc kubenswrapper[4712]: I0131 06:06:55.608918 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pv4rt" podStartSLOduration=2.5404819339999998 podStartE2EDuration="6.608892001s" podCreationTimestamp="2026-01-31 06:06:49 +0000 UTC" firstStartedPulling="2026-01-31 06:06:50.485733884 +0000 UTC m=+1676.579615735" lastFinishedPulling="2026-01-31 06:06:54.554143951 +0000 UTC m=+1680.648025802" observedRunningTime="2026-01-31 
06:06:55.601566414 +0000 UTC m=+1681.695448255" watchObservedRunningTime="2026-01-31 06:06:55.608892001 +0000 UTC m=+1681.702773842"
Jan 31 06:06:59 crc kubenswrapper[4712]: I0131 06:06:59.614529 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pv4rt"
Jan 31 06:06:59 crc kubenswrapper[4712]: I0131 06:06:59.615301 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pv4rt"
Jan 31 06:06:59 crc kubenswrapper[4712]: I0131 06:06:59.663592 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pv4rt"
Jan 31 06:07:00 crc kubenswrapper[4712]: I0131 06:07:00.662819 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pv4rt"
Jan 31 06:07:00 crc kubenswrapper[4712]: I0131 06:07:00.714808 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pv4rt"]
Jan 31 06:07:02 crc kubenswrapper[4712]: I0131 06:07:02.631611 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pv4rt" podUID="ed6fce71-b862-4139-98a4-c067162724b4" containerName="registry-server" containerID="cri-o://02aa83e0d7baa9948990e62bd063a0c60cf6a5655df7dd512ebba736fc062fa9" gracePeriod=2
Jan 31 06:07:03 crc kubenswrapper[4712]: I0131 06:07:03.642554 4712 generic.go:334] "Generic (PLEG): container finished" podID="ed6fce71-b862-4139-98a4-c067162724b4" containerID="02aa83e0d7baa9948990e62bd063a0c60cf6a5655df7dd512ebba736fc062fa9" exitCode=0
Jan 31 06:07:03 crc kubenswrapper[4712]: I0131 06:07:03.642604 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv4rt" event={"ID":"ed6fce71-b862-4139-98a4-c067162724b4","Type":"ContainerDied","Data":"02aa83e0d7baa9948990e62bd063a0c60cf6a5655df7dd512ebba736fc062fa9"}
Jan 31 06:07:04 crc kubenswrapper[4712]: I0131 06:07:04.895430 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pv4rt"
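
The two pod_startup_latency_tracker entries above decompose cleanly: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration is that same span with the image-pull window (firstStartedPulling through lastFinishedPulling) subtracted. A minimal Go sketch of that arithmetic, using the redhat-marketplace-pv4rt timestamps copied from the log (illustrative only, not kubelet code):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Go's default Time.String() layout, which matches the logged timestamps.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2026-01-31 06:06:49 +0000 UTC")             // podCreationTimestamp
	firstPull := parse("2026-01-31 06:06:50.485733884 +0000 UTC") // firstStartedPulling
	lastPull := parse("2026-01-31 06:06:54.554143951 +0000 UTC")  // lastFinishedPulling
	running := parse("2026-01-31 06:06:55.608892001 +0000 UTC")   // observedRunningTime

	e2e := running.Sub(created)          // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: pull window excluded
	fmt.Println(e2e, slo)
}

Running it prints 6.608892001s and 2.540481934s, matching the logged podStartE2EDuration and podStartSLOduration exactly, which confirms the SLO figure is just startup time net of image pulling.
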
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:07:04 crc kubenswrapper[4712]: I0131 06:07:04.999890 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-catalog-content\") pod \"ed6fce71-b862-4139-98a4-c067162724b4\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.000025 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49hp4\" (UniqueName: \"kubernetes.io/projected/ed6fce71-b862-4139-98a4-c067162724b4-kube-api-access-49hp4\") pod \"ed6fce71-b862-4139-98a4-c067162724b4\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.000117 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-utilities\") pod \"ed6fce71-b862-4139-98a4-c067162724b4\" (UID: \"ed6fce71-b862-4139-98a4-c067162724b4\") " Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.001498 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-utilities" (OuterVolumeSpecName: "utilities") pod "ed6fce71-b862-4139-98a4-c067162724b4" (UID: "ed6fce71-b862-4139-98a4-c067162724b4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.007461 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed6fce71-b862-4139-98a4-c067162724b4-kube-api-access-49hp4" (OuterVolumeSpecName: "kube-api-access-49hp4") pod "ed6fce71-b862-4139-98a4-c067162724b4" (UID: "ed6fce71-b862-4139-98a4-c067162724b4"). InnerVolumeSpecName "kube-api-access-49hp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.032310 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed6fce71-b862-4139-98a4-c067162724b4" (UID: "ed6fce71-b862-4139-98a4-c067162724b4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.103157 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49hp4\" (UniqueName: \"kubernetes.io/projected/ed6fce71-b862-4139-98a4-c067162724b4-kube-api-access-49hp4\") on node \"crc\" DevicePath \"\"" Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.103219 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.103238 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed6fce71-b862-4139-98a4-c067162724b4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.665348 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pv4rt" event={"ID":"ed6fce71-b862-4139-98a4-c067162724b4","Type":"ContainerDied","Data":"ad7951c2c4f9fb97b388fa172df75030a411d29997694cc7364ffa3fa8438736"} Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.665430 4712 scope.go:117] "RemoveContainer" containerID="02aa83e0d7baa9948990e62bd063a0c60cf6a5655df7dd512ebba736fc062fa9" Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.665811 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pv4rt" Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.686449 4712 scope.go:117] "RemoveContainer" containerID="e55547bedefbb295810232610dc5280435ff671522b786c8ec6dfaa503cd0def" Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.708682 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pv4rt"] Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.718645 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pv4rt"] Jan 31 06:07:05 crc kubenswrapper[4712]: I0131 06:07:05.736894 4712 scope.go:117] "RemoveContainer" containerID="c26774f696afd8c743ab12bf76f0b4287a8cfadb94b3ff37c58cb1941e25894f" Jan 31 06:07:06 crc kubenswrapper[4712]: I0131 06:07:06.519923 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed6fce71-b862-4139-98a4-c067162724b4" path="/var/lib/kubelet/pods/ed6fce71-b862-4139-98a4-c067162724b4/volumes" Jan 31 06:07:12 crc kubenswrapper[4712]: I0131 06:07:12.497268 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:07:12 crc kubenswrapper[4712]: I0131 06:07:12.497969 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:07:16 crc kubenswrapper[4712]: I0131 06:07:16.447083 4712 scope.go:117] "RemoveContainer" containerID="b50f595a85b8cb84f8f85f0f4729e5bdcd66eb3c14f8162c3298593ac6b72b10" Jan 31 06:07:16 crc kubenswrapper[4712]: I0131 06:07:16.476740 4712 scope.go:117] "RemoveContainer" 
containerID="a3c1890c424c0ba6213feb4be1b0330b91b4dd05de460c5e563d4f94724b179a" Jan 31 06:07:40 crc kubenswrapper[4712]: I0131 06:07:40.870457 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v94kc"] Jan 31 06:07:40 crc kubenswrapper[4712]: E0131 06:07:40.871551 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6fce71-b862-4139-98a4-c067162724b4" containerName="extract-utilities" Jan 31 06:07:40 crc kubenswrapper[4712]: I0131 06:07:40.871567 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6fce71-b862-4139-98a4-c067162724b4" containerName="extract-utilities" Jan 31 06:07:40 crc kubenswrapper[4712]: E0131 06:07:40.871589 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6fce71-b862-4139-98a4-c067162724b4" containerName="extract-content" Jan 31 06:07:40 crc kubenswrapper[4712]: I0131 06:07:40.871595 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6fce71-b862-4139-98a4-c067162724b4" containerName="extract-content" Jan 31 06:07:40 crc kubenswrapper[4712]: E0131 06:07:40.871614 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6fce71-b862-4139-98a4-c067162724b4" containerName="registry-server" Jan 31 06:07:40 crc kubenswrapper[4712]: I0131 06:07:40.871620 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6fce71-b862-4139-98a4-c067162724b4" containerName="registry-server" Jan 31 06:07:40 crc kubenswrapper[4712]: I0131 06:07:40.871826 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed6fce71-b862-4139-98a4-c067162724b4" containerName="registry-server" Jan 31 06:07:40 crc kubenswrapper[4712]: I0131 06:07:40.873258 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:40 crc kubenswrapper[4712]: I0131 06:07:40.884721 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v94kc"] Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.063073 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bb8992-f20d-4480-9126-1793aa64b210-catalog-content\") pod \"community-operators-v94kc\" (UID: \"08bb8992-f20d-4480-9126-1793aa64b210\") " pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.063437 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8ksv\" (UniqueName: \"kubernetes.io/projected/08bb8992-f20d-4480-9126-1793aa64b210-kube-api-access-c8ksv\") pod \"community-operators-v94kc\" (UID: \"08bb8992-f20d-4480-9126-1793aa64b210\") " pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.063476 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bb8992-f20d-4480-9126-1793aa64b210-utilities\") pod \"community-operators-v94kc\" (UID: \"08bb8992-f20d-4480-9126-1793aa64b210\") " pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.165679 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bb8992-f20d-4480-9126-1793aa64b210-catalog-content\") pod \"community-operators-v94kc\" 
(UID: \"08bb8992-f20d-4480-9126-1793aa64b210\") " pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.165737 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8ksv\" (UniqueName: \"kubernetes.io/projected/08bb8992-f20d-4480-9126-1793aa64b210-kube-api-access-c8ksv\") pod \"community-operators-v94kc\" (UID: \"08bb8992-f20d-4480-9126-1793aa64b210\") " pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.165768 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bb8992-f20d-4480-9126-1793aa64b210-utilities\") pod \"community-operators-v94kc\" (UID: \"08bb8992-f20d-4480-9126-1793aa64b210\") " pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.166690 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bb8992-f20d-4480-9126-1793aa64b210-catalog-content\") pod \"community-operators-v94kc\" (UID: \"08bb8992-f20d-4480-9126-1793aa64b210\") " pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.167202 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bb8992-f20d-4480-9126-1793aa64b210-utilities\") pod \"community-operators-v94kc\" (UID: \"08bb8992-f20d-4480-9126-1793aa64b210\") " pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.198499 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8ksv\" (UniqueName: \"kubernetes.io/projected/08bb8992-f20d-4480-9126-1793aa64b210-kube-api-access-c8ksv\") pod \"community-operators-v94kc\" (UID: \"08bb8992-f20d-4480-9126-1793aa64b210\") " pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.208060 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v94kc" Jan 31 06:07:41 crc kubenswrapper[4712]: I0131 06:07:41.753811 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v94kc"] Jan 31 06:07:42 crc kubenswrapper[4712]: I0131 06:07:42.012364 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v94kc" event={"ID":"08bb8992-f20d-4480-9126-1793aa64b210","Type":"ContainerStarted","Data":"050dd33fbf7e913d41bd2fe4cbf37ef05fed8d2e6c9d7fde31bf20b82ea04f7e"} Jan 31 06:07:42 crc kubenswrapper[4712]: I0131 06:07:42.497739 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:07:42 crc kubenswrapper[4712]: I0131 06:07:42.497814 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:07:42 crc kubenswrapper[4712]: I0131 06:07:42.497901 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 06:07:42 crc kubenswrapper[4712]: I0131 06:07:42.498568 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 06:07:42 crc kubenswrapper[4712]: I0131 06:07:42.498644 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" gracePeriod=600 Jan 31 06:07:43 crc kubenswrapper[4712]: I0131 06:07:43.028156 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" exitCode=0 Jan 31 06:07:43 crc kubenswrapper[4712]: I0131 06:07:43.028207 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453"} Jan 31 06:07:43 crc kubenswrapper[4712]: I0131 06:07:43.028266 4712 scope.go:117] "RemoveContainer" containerID="73eb5805d5b1bc38b6b568e991a2ca4d8d641189ec28507e068fbd8ff0272f37" Jan 31 06:07:43 crc kubenswrapper[4712]: I0131 06:07:43.030672 4712 generic.go:334] "Generic (PLEG): container finished" podID="08bb8992-f20d-4480-9126-1793aa64b210" containerID="fb252b2e864117692fe8c44167b67d7b4a8bc6c755da1d084e52e7bdf00a842f" exitCode=0 Jan 31 06:07:43 crc kubenswrapper[4712]: I0131 06:07:43.030708 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v94kc" 
event={"ID":"08bb8992-f20d-4480-9126-1793aa64b210","Type":"ContainerDied","Data":"fb252b2e864117692fe8c44167b67d7b4a8bc6c755da1d084e52e7bdf00a842f"} Jan 31 06:07:43 crc kubenswrapper[4712]: I0131 06:07:43.033545 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 06:07:43 crc kubenswrapper[4712]: E0131 06:07:43.204136 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:07:44 crc kubenswrapper[4712]: I0131 06:07:44.042854 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:07:44 crc kubenswrapper[4712]: E0131 06:07:44.043516 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:07:53 crc kubenswrapper[4712]: I0131 06:07:53.167404 4712 generic.go:334] "Generic (PLEG): container finished" podID="08bb8992-f20d-4480-9126-1793aa64b210" containerID="3356311bd1ca56a28345c760fc0fe4280dbdef6105675393a432bd4e14163e2a" exitCode=0 Jan 31 06:07:53 crc kubenswrapper[4712]: I0131 06:07:53.167461 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v94kc" event={"ID":"08bb8992-f20d-4480-9126-1793aa64b210","Type":"ContainerDied","Data":"3356311bd1ca56a28345c760fc0fe4280dbdef6105675393a432bd4e14163e2a"} Jan 31 06:07:55 crc kubenswrapper[4712]: I0131 06:07:55.205544 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v94kc" event={"ID":"08bb8992-f20d-4480-9126-1793aa64b210","Type":"ContainerStarted","Data":"0455e65bea668583efea63950a8bf6d2e280012f2dbf814846a5d1f4c9a39fcc"} Jan 31 06:07:55 crc kubenswrapper[4712]: I0131 06:07:55.224491 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v94kc" podStartSLOduration=4.345858905 podStartE2EDuration="15.224468513s" podCreationTimestamp="2026-01-31 06:07:40 +0000 UTC" firstStartedPulling="2026-01-31 06:07:43.032345456 +0000 UTC m=+1729.126227297" lastFinishedPulling="2026-01-31 06:07:53.910955064 +0000 UTC m=+1740.004836905" observedRunningTime="2026-01-31 06:07:55.223287774 +0000 UTC m=+1741.317169695" watchObservedRunningTime="2026-01-31 06:07:55.224468513 +0000 UTC m=+1741.318350364" Jan 31 06:07:58 crc kubenswrapper[4712]: I0131 06:07:58.505384 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:07:58 crc kubenswrapper[4712]: E0131 06:07:58.506055 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:08:01 crc kubenswrapper[4712]: I0131 06:08:01.208223 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v94kc"
Jan 31 06:08:01 crc kubenswrapper[4712]: I0131 06:08:01.208631 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v94kc"
Jan 31 06:08:01 crc kubenswrapper[4712]: I0131 06:08:01.256613 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v94kc"
Jan 31 06:08:01 crc kubenswrapper[4712]: I0131 06:08:01.306839 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v94kc"
Jan 31 06:08:01 crc kubenswrapper[4712]: I0131 06:08:01.545491 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v94kc"]
Jan 31 06:08:01 crc kubenswrapper[4712]: I0131 06:08:01.586071 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lvdxh"]
Jan 31 06:08:01 crc kubenswrapper[4712]: I0131 06:08:01.586431 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lvdxh" podUID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerName="registry-server" containerID="cri-o://8ac82e7ec95b3e1a5e8b022a48adbe235a4aae53e719a1ec944bad1e4fb0a5d1" gracePeriod=2
Jan 31 06:08:02 crc kubenswrapper[4712]: I0131 06:08:02.269009 4712 generic.go:334] "Generic (PLEG): container finished" podID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerID="8ac82e7ec95b3e1a5e8b022a48adbe235a4aae53e719a1ec944bad1e4fb0a5d1" exitCode=0
Jan 31 06:08:02 crc kubenswrapper[4712]: I0131 06:08:02.269077 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lvdxh" event={"ID":"203c6cb9-4fba-4d77-9bf4-9825f1419e4e","Type":"ContainerDied","Data":"8ac82e7ec95b3e1a5e8b022a48adbe235a4aae53e719a1ec944bad1e4fb0a5d1"}
Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.536861 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lvdxh"
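
"Killing container with a grace period" with gracePeriod=2 is the standard two-phase stop: the runtime delivers SIGTERM, waits up to the grace period for the process to exit, then escalates to SIGKILL. A sketch of that flow against an ordinary process, standing in for the CRI StopContainer call (the helper stopWithGrace is ours, and the real path runs through CRI-O, not os/exec):

package main

import (
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace mirrors the two-phase kill: SIGTERM first, SIGKILL once
// the grace period expires without the process exiting on its own.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	cmd.Process.Signal(syscall.SIGTERM)
	select {
	case <-done: // exited voluntarily within the grace period
	case <-time.After(grace):
		cmd.Process.Kill() // grace period expired: force SIGKILL
		<-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	stopWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the log
}

Here the registry-server exits with exitCode=0 inside the window, so the PLEG reports ContainerDied without the kill ever escalating.
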
Need to start a new one" pod="openshift-marketplace/community-operators-lvdxh" Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.706883 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pz6mh\" (UniqueName: \"kubernetes.io/projected/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-kube-api-access-pz6mh\") pod \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.707366 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-utilities\") pod \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.707469 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-catalog-content\") pod \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\" (UID: \"203c6cb9-4fba-4d77-9bf4-9825f1419e4e\") " Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.707931 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-utilities" (OuterVolumeSpecName: "utilities") pod "203c6cb9-4fba-4d77-9bf4-9825f1419e4e" (UID: "203c6cb9-4fba-4d77-9bf4-9825f1419e4e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.708783 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.716030 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-kube-api-access-pz6mh" (OuterVolumeSpecName: "kube-api-access-pz6mh") pod "203c6cb9-4fba-4d77-9bf4-9825f1419e4e" (UID: "203c6cb9-4fba-4d77-9bf4-9825f1419e4e"). InnerVolumeSpecName "kube-api-access-pz6mh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.760191 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "203c6cb9-4fba-4d77-9bf4-9825f1419e4e" (UID: "203c6cb9-4fba-4d77-9bf4-9825f1419e4e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.810205 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pz6mh\" (UniqueName: \"kubernetes.io/projected/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-kube-api-access-pz6mh\") on node \"crc\" DevicePath \"\"" Jan 31 06:08:04 crc kubenswrapper[4712]: I0131 06:08:04.810234 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203c6cb9-4fba-4d77-9bf4-9825f1419e4e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:08:05 crc kubenswrapper[4712]: I0131 06:08:05.300282 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lvdxh" event={"ID":"203c6cb9-4fba-4d77-9bf4-9825f1419e4e","Type":"ContainerDied","Data":"caed9e72eb17f5822e923dc34c2d69a8219a714dd22aeaa4e216253fafd46e11"} Jan 31 06:08:05 crc kubenswrapper[4712]: I0131 06:08:05.300336 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lvdxh" Jan 31 06:08:05 crc kubenswrapper[4712]: I0131 06:08:05.300355 4712 scope.go:117] "RemoveContainer" containerID="8ac82e7ec95b3e1a5e8b022a48adbe235a4aae53e719a1ec944bad1e4fb0a5d1" Jan 31 06:08:05 crc kubenswrapper[4712]: I0131 06:08:05.328625 4712 scope.go:117] "RemoveContainer" containerID="123298f45a34b31c129acb6212fb447683eda567437cff0a00515d934a5e56d0" Jan 31 06:08:05 crc kubenswrapper[4712]: I0131 06:08:05.335000 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lvdxh"] Jan 31 06:08:05 crc kubenswrapper[4712]: I0131 06:08:05.346646 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lvdxh"] Jan 31 06:08:05 crc kubenswrapper[4712]: I0131 06:08:05.388943 4712 scope.go:117] "RemoveContainer" containerID="f193d8ab8e082aa426959dfc4c4d38fbe953bfab7fb081c5648b61f08c47484b" Jan 31 06:08:06 crc kubenswrapper[4712]: I0131 06:08:06.518746 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" path="/var/lib/kubelet/pods/203c6cb9-4fba-4d77-9bf4-9825f1419e4e/volumes" Jan 31 06:08:09 crc kubenswrapper[4712]: I0131 06:08:09.504105 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:08:09 crc kubenswrapper[4712]: E0131 06:08:09.504746 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:08:21 crc kubenswrapper[4712]: I0131 06:08:21.504738 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:08:21 crc kubenswrapper[4712]: E0131 06:08:21.505625 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.221582 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jtlm4"] Jan 31 06:08:26 crc kubenswrapper[4712]: E0131 06:08:26.222718 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerName="extract-utilities" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.222734 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerName="extract-utilities" Jan 31 06:08:26 crc kubenswrapper[4712]: E0131 06:08:26.222785 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerName="registry-server" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.222793 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerName="registry-server" Jan 31 06:08:26 crc kubenswrapper[4712]: E0131 06:08:26.222807 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerName="extract-content" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.222813 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerName="extract-content" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.222997 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="203c6cb9-4fba-4d77-9bf4-9825f1419e4e" containerName="registry-server" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.224544 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.237839 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jtlm4"] Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.337341 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-utilities\") pod \"certified-operators-jtlm4\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.337427 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-catalog-content\") pod \"certified-operators-jtlm4\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.337507 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h284f\" (UniqueName: \"kubernetes.io/projected/0cf85667-9fce-4569-815c-12c683426e92-kube-api-access-h284f\") pod \"certified-operators-jtlm4\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.439681 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h284f\" (UniqueName: \"kubernetes.io/projected/0cf85667-9fce-4569-815c-12c683426e92-kube-api-access-h284f\") pod \"certified-operators-jtlm4\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.439842 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-utilities\") pod \"certified-operators-jtlm4\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.439869 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-catalog-content\") pod \"certified-operators-jtlm4\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.440390 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-catalog-content\") pod \"certified-operators-jtlm4\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.440438 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-utilities\") pod \"certified-operators-jtlm4\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.460045 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-h284f\" (UniqueName: \"kubernetes.io/projected/0cf85667-9fce-4569-815c-12c683426e92-kube-api-access-h284f\") pod \"certified-operators-jtlm4\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:26 crc kubenswrapper[4712]: I0131 06:08:26.566283 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:27 crc kubenswrapper[4712]: I0131 06:08:27.093264 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jtlm4"] Jan 31 06:08:27 crc kubenswrapper[4712]: I0131 06:08:27.516479 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jtlm4" event={"ID":"0cf85667-9fce-4569-815c-12c683426e92","Type":"ContainerStarted","Data":"be9b5790fa110eb9b3d04d12b6e1395d46761f0a1fcb0d49278dac18d1603102"} Jan 31 06:08:28 crc kubenswrapper[4712]: I0131 06:08:28.527993 4712 generic.go:334] "Generic (PLEG): container finished" podID="0cf85667-9fce-4569-815c-12c683426e92" containerID="147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0" exitCode=0 Jan 31 06:08:28 crc kubenswrapper[4712]: I0131 06:08:28.528103 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jtlm4" event={"ID":"0cf85667-9fce-4569-815c-12c683426e92","Type":"ContainerDied","Data":"147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0"} Jan 31 06:08:31 crc kubenswrapper[4712]: I0131 06:08:31.558830 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jtlm4" event={"ID":"0cf85667-9fce-4569-815c-12c683426e92","Type":"ContainerStarted","Data":"9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb"} Jan 31 06:08:32 crc kubenswrapper[4712]: I0131 06:08:32.569839 4712 generic.go:334] "Generic (PLEG): container finished" podID="0cf85667-9fce-4569-815c-12c683426e92" containerID="9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb" exitCode=0 Jan 31 06:08:32 crc kubenswrapper[4712]: I0131 06:08:32.569902 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jtlm4" event={"ID":"0cf85667-9fce-4569-815c-12c683426e92","Type":"ContainerDied","Data":"9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb"} Jan 31 06:08:34 crc kubenswrapper[4712]: I0131 06:08:34.595946 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jtlm4" event={"ID":"0cf85667-9fce-4569-815c-12c683426e92","Type":"ContainerStarted","Data":"af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992"} Jan 31 06:08:35 crc kubenswrapper[4712]: I0131 06:08:35.504266 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:08:35 crc kubenswrapper[4712]: E0131 06:08:35.504967 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:08:35 crc kubenswrapper[4712]: I0131 06:08:35.627944 4712 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jtlm4" podStartSLOduration=3.9409723469999998 podStartE2EDuration="9.627916731s" podCreationTimestamp="2026-01-31 06:08:26 +0000 UTC" firstStartedPulling="2026-01-31 06:08:28.530566583 +0000 UTC m=+1774.624448434" lastFinishedPulling="2026-01-31 06:08:34.217510987 +0000 UTC m=+1780.311392818" observedRunningTime="2026-01-31 06:08:35.621934997 +0000 UTC m=+1781.715816848" watchObservedRunningTime="2026-01-31 06:08:35.627916731 +0000 UTC m=+1781.721798572" Jan 31 06:08:36 crc kubenswrapper[4712]: I0131 06:08:36.566464 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:36 crc kubenswrapper[4712]: I0131 06:08:36.566836 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:36 crc kubenswrapper[4712]: I0131 06:08:36.630659 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:46 crc kubenswrapper[4712]: I0131 06:08:46.622744 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:47 crc kubenswrapper[4712]: I0131 06:08:46.680826 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jtlm4"] Jan 31 06:08:47 crc kubenswrapper[4712]: I0131 06:08:46.747800 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jtlm4" podUID="0cf85667-9fce-4569-815c-12c683426e92" containerName="registry-server" containerID="cri-o://af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992" gracePeriod=2 Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.506933 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:08:48 crc kubenswrapper[4712]: E0131 06:08:48.507430 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.518430 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.653166 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-utilities\") pod \"0cf85667-9fce-4569-815c-12c683426e92\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.653373 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h284f\" (UniqueName: \"kubernetes.io/projected/0cf85667-9fce-4569-815c-12c683426e92-kube-api-access-h284f\") pod \"0cf85667-9fce-4569-815c-12c683426e92\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.653422 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-catalog-content\") pod \"0cf85667-9fce-4569-815c-12c683426e92\" (UID: \"0cf85667-9fce-4569-815c-12c683426e92\") " Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.655302 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-utilities" (OuterVolumeSpecName: "utilities") pod "0cf85667-9fce-4569-815c-12c683426e92" (UID: "0cf85667-9fce-4569-815c-12c683426e92"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.666459 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cf85667-9fce-4569-815c-12c683426e92-kube-api-access-h284f" (OuterVolumeSpecName: "kube-api-access-h284f") pod "0cf85667-9fce-4569-815c-12c683426e92" (UID: "0cf85667-9fce-4569-815c-12c683426e92"). InnerVolumeSpecName "kube-api-access-h284f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.704365 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0cf85667-9fce-4569-815c-12c683426e92" (UID: "0cf85667-9fce-4569-815c-12c683426e92"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.755774 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.755815 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h284f\" (UniqueName: \"kubernetes.io/projected/0cf85667-9fce-4569-815c-12c683426e92-kube-api-access-h284f\") on node \"crc\" DevicePath \"\"" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.755826 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cf85667-9fce-4569-815c-12c683426e92-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.772926 4712 generic.go:334] "Generic (PLEG): container finished" podID="0cf85667-9fce-4569-815c-12c683426e92" containerID="af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992" exitCode=0 Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.772991 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jtlm4" event={"ID":"0cf85667-9fce-4569-815c-12c683426e92","Type":"ContainerDied","Data":"af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992"} Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.773042 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jtlm4" event={"ID":"0cf85667-9fce-4569-815c-12c683426e92","Type":"ContainerDied","Data":"be9b5790fa110eb9b3d04d12b6e1395d46761f0a1fcb0d49278dac18d1603102"} Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.773074 4712 scope.go:117] "RemoveContainer" containerID="af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.773083 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jtlm4" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.811404 4712 scope.go:117] "RemoveContainer" containerID="9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.827591 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jtlm4"] Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.837200 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jtlm4"] Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.842063 4712 scope.go:117] "RemoveContainer" containerID="147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.883805 4712 scope.go:117] "RemoveContainer" containerID="af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992" Jan 31 06:08:48 crc kubenswrapper[4712]: E0131 06:08:48.884365 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992\": container with ID starting with af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992 not found: ID does not exist" containerID="af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.884409 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992"} err="failed to get container status \"af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992\": rpc error: code = NotFound desc = could not find container \"af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992\": container with ID starting with af7a2d5d1ab35e4ef9ea12b30719d78047e8e5d90da6a2771882c5dce47d5992 not found: ID does not exist" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.884444 4712 scope.go:117] "RemoveContainer" containerID="9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb" Jan 31 06:08:48 crc kubenswrapper[4712]: E0131 06:08:48.884761 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb\": container with ID starting with 9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb not found: ID does not exist" containerID="9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.884789 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb"} err="failed to get container status \"9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb\": rpc error: code = NotFound desc = could not find container \"9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb\": container with ID starting with 9b8d35d35126d2b0051f3495389b14a68fa64125e89a4c17270edcf46de368fb not found: ID does not exist" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.884810 4712 scope.go:117] "RemoveContainer" containerID="147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0" Jan 31 06:08:48 crc kubenswrapper[4712]: E0131 06:08:48.885158 4712 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0\": container with ID starting with 147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0 not found: ID does not exist" containerID="147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0" Jan 31 06:08:48 crc kubenswrapper[4712]: I0131 06:08:48.885201 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0"} err="failed to get container status \"147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0\": rpc error: code = NotFound desc = could not find container \"147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0\": container with ID starting with 147b238840c5dcc59a46bba23c9e276ed27985857863d5c31584ddebb1714de0 not found: ID does not exist" Jan 31 06:08:48 crc kubenswrapper[4712]: E0131 06:08:48.974628 4712 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cf85667_9fce_4569_815c_12c683426e92.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cf85667_9fce_4569_815c_12c683426e92.slice/crio-be9b5790fa110eb9b3d04d12b6e1395d46761f0a1fcb0d49278dac18d1603102\": RecentStats: unable to find data in memory cache]" Jan 31 06:08:50 crc kubenswrapper[4712]: I0131 06:08:50.514498 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cf85667-9fce-4569-815c-12c683426e92" path="/var/lib/kubelet/pods/0cf85667-9fce-4569-815c-12c683426e92/volumes" Jan 31 06:08:59 crc kubenswrapper[4712]: I0131 06:08:59.504949 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:08:59 crc kubenswrapper[4712]: E0131 06:08:59.507230 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:09:10 crc kubenswrapper[4712]: I0131 06:09:10.504711 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:09:10 crc kubenswrapper[4712]: E0131 06:09:10.506462 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:09:24 crc kubenswrapper[4712]: I0131 06:09:24.511727 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:09:24 crc kubenswrapper[4712]: E0131 06:09:24.512695 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:09:38 crc kubenswrapper[4712]: I0131 06:09:38.504642 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:09:38 crc kubenswrapper[4712]: E0131 06:09:38.505926 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:09:41 crc kubenswrapper[4712]: I0131 06:09:41.042836 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-5785-account-create-update-bkvrb"] Jan 31 06:09:41 crc kubenswrapper[4712]: I0131 06:09:41.053758 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-c9pcv"] Jan 31 06:09:41 crc kubenswrapper[4712]: I0131 06:09:41.064864 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-5785-account-create-update-bkvrb"] Jan 31 06:09:41 crc kubenswrapper[4712]: I0131 06:09:41.074901 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-c9pcv"] Jan 31 06:09:42 crc kubenswrapper[4712]: I0131 06:09:42.514662 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84dacb4f-7fb4-47fb-a405-5550d56c54c6" path="/var/lib/kubelet/pods/84dacb4f-7fb4-47fb-a405-5550d56c54c6/volumes" Jan 31 06:09:42 crc kubenswrapper[4712]: I0131 06:09:42.515341 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fef33d31-f3fd-4d92-8510-520b0d7517de" path="/var/lib/kubelet/pods/fef33d31-f3fd-4d92-8510-520b0d7517de/volumes" Jan 31 06:09:47 crc kubenswrapper[4712]: I0131 06:09:47.027456 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-8e51-account-create-update-psnbh"] Jan 31 06:09:47 crc kubenswrapper[4712]: I0131 06:09:47.036608 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-8e51-account-create-update-psnbh"] Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.036713 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-nkvdt"] Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.072624 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-b4ph9"] Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.083536 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-nkvdt"] Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.092973 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-b4ph9"] Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.100732 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8a1d-account-create-update-9mm2b"] Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.109324 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-8a1d-account-create-update-9mm2b"] Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.516667 4712 kubelet_volumes.go:163] "Cleaned up 
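
"Cleaned up orphaned pod volumes dir" is the kubelet's periodic sweep of /var/lib/kubelet/pods: each directory there is named for a pod UID, and a directory whose UID no longer maps to a known pod, and whose volumes are already unmounted, gets removed. A sketch of that sweep under those assumptions (cleanupOrphans and the active-set lookup are ours, not kubelet code, and the demo path is deliberately not the real one):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cleanupOrphans deletes the volumes dir of any pod directory whose UID is
// not in the active set, mimicking the kubelet_volumes entries in the log.
func cleanupOrphans(podsDir string, active map[string]bool) error {
	entries, err := os.ReadDir(podsDir)
	if err != nil {
		return err
	}
	for _, e := range entries {
		if !e.IsDir() || active[e.Name()] {
			continue // live pod: leave its directory alone
		}
		volumes := filepath.Join(podsDir, e.Name(), "volumes")
		if err := os.RemoveAll(volumes); err != nil {
			return err
		}
		fmt.Printf("Cleaned up orphaned pod volumes dir podUID=%q path=%q\n", e.Name(), volumes)
	}
	return nil
}

func main() {
	// Demo path only; pointing this at the real /var/lib/kubelet/pods
	// would actually delete volume directories.
	_ = cleanupOrphans("./demo-pods", map[string]bool{"still-running-uid": true})
}

In the log this fires a few seconds after each SyncLoop REMOVE, once unmounts have finished, which is why the cleanup entries trail the API deletions of the short-lived openstack job pods.
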
orphaned pod volumes dir" podUID="1b0cb952-b4c3-4cb2-b420-72d735bf02f0" path="/var/lib/kubelet/pods/1b0cb952-b4c3-4cb2-b420-72d735bf02f0/volumes" Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.517306 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b036491-3b39-44db-b9c8-f1ab0fc6034e" path="/var/lib/kubelet/pods/6b036491-3b39-44db-b9c8-f1ab0fc6034e/volumes" Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.517903 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80b022b2-f7c3-489e-832a-116b9d4edbf7" path="/var/lib/kubelet/pods/80b022b2-f7c3-489e-832a-116b9d4edbf7/volumes" Jan 31 06:09:48 crc kubenswrapper[4712]: I0131 06:09:48.518494 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e2a723-54a1-42ac-8e8c-3ca125b6d01c" path="/var/lib/kubelet/pods/e7e2a723-54a1-42ac-8e8c-3ca125b6d01c/volumes" Jan 31 06:09:52 crc kubenswrapper[4712]: I0131 06:09:52.505035 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:09:52 crc kubenswrapper[4712]: E0131 06:09:52.505961 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:09:55 crc kubenswrapper[4712]: I0131 06:09:55.041342 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-667z8"] Jan 31 06:09:55 crc kubenswrapper[4712]: I0131 06:09:55.051584 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-ldrnk"] Jan 31 06:09:55 crc kubenswrapper[4712]: I0131 06:09:55.061082 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-ldrnk"] Jan 31 06:09:55 crc kubenswrapper[4712]: I0131 06:09:55.069910 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-667z8"] Jan 31 06:09:56 crc kubenswrapper[4712]: I0131 06:09:56.039665 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-72bxm"] Jan 31 06:09:56 crc kubenswrapper[4712]: I0131 06:09:56.051607 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8725-account-create-update-n2mlq"] Jan 31 06:09:56 crc kubenswrapper[4712]: I0131 06:09:56.062298 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8725-account-create-update-n2mlq"] Jan 31 06:09:56 crc kubenswrapper[4712]: I0131 06:09:56.072754 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-72bxm"] Jan 31 06:09:56 crc kubenswrapper[4712]: I0131 06:09:56.518716 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a2b25a7-db91-4e29-9d5e-6acaa45cc200" path="/var/lib/kubelet/pods/0a2b25a7-db91-4e29-9d5e-6acaa45cc200/volumes" Jan 31 06:09:56 crc kubenswrapper[4712]: I0131 06:09:56.519906 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc" path="/var/lib/kubelet/pods/24b09fe5-c6dc-4a74-97ce-e4c2295cf6bc/volumes" Jan 31 06:09:56 crc kubenswrapper[4712]: I0131 06:09:56.521315 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="907ab3c5-1c04-422a-ac94-a6179bd6e9ac" path="/var/lib/kubelet/pods/907ab3c5-1c04-422a-ac94-a6179bd6e9ac/volumes" Jan 31 06:09:56 crc kubenswrapper[4712]: I0131 06:09:56.522305 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdbe92ef-e3d9-4528-bc92-ba7309daafe4" path="/var/lib/kubelet/pods/fdbe92ef-e3d9-4528-bc92-ba7309daafe4/volumes" Jan 31 06:09:57 crc kubenswrapper[4712]: I0131 06:09:57.036708 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-246d-account-create-update-zm6nr"] Jan 31 06:09:57 crc kubenswrapper[4712]: I0131 06:09:57.047535 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-19bb-account-create-update-stcpz"] Jan 31 06:09:57 crc kubenswrapper[4712]: I0131 06:09:57.057896 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-g4wb5"] Jan 31 06:09:57 crc kubenswrapper[4712]: I0131 06:09:57.066849 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-246d-account-create-update-zm6nr"] Jan 31 06:09:57 crc kubenswrapper[4712]: I0131 06:09:57.074323 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-g4wb5"] Jan 31 06:09:57 crc kubenswrapper[4712]: I0131 06:09:57.083005 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-19bb-account-create-update-stcpz"] Jan 31 06:09:58 crc kubenswrapper[4712]: I0131 06:09:58.517214 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2998b9bb-f92a-427c-824a-80e66ff0643b" path="/var/lib/kubelet/pods/2998b9bb-f92a-427c-824a-80e66ff0643b/volumes" Jan 31 06:09:58 crc kubenswrapper[4712]: I0131 06:09:58.518481 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47c8e6c2-dd63-4d15-8c59-f1aa014bdebe" path="/var/lib/kubelet/pods/47c8e6c2-dd63-4d15-8c59-f1aa014bdebe/volumes" Jan 31 06:09:58 crc kubenswrapper[4712]: I0131 06:09:58.519047 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a28a1ff-7c14-412e-9a7f-5a0018859762" path="/var/lib/kubelet/pods/4a28a1ff-7c14-412e-9a7f-5a0018859762/volumes" Jan 31 06:10:04 crc kubenswrapper[4712]: I0131 06:10:04.519591 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:10:04 crc kubenswrapper[4712]: E0131 06:10:04.522128 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:10:15 crc kubenswrapper[4712]: I0131 06:10:15.504076 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:10:15 crc kubenswrapper[4712]: E0131 06:10:15.508238 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:10:17 crc kubenswrapper[4712]: I0131 06:10:17.059306 4712 
scope.go:117] "RemoveContainer" containerID="bd8d3151c6577f478585f23abf3ae1d5f9cd8a4b55e2e9c3708211ff631a0cc9" Jan 31 06:10:17 crc kubenswrapper[4712]: I0131 06:10:17.718666 4712 scope.go:117] "RemoveContainer" containerID="1e763a437a98264799baa28a2d2be454f1da0e8c5b139ee5d3fec05fd6a2e138" Jan 31 06:10:17 crc kubenswrapper[4712]: I0131 06:10:17.783854 4712 scope.go:117] "RemoveContainer" containerID="18d8bf83d8cfdc0f92ec0529fa02b2b9e5c064dc7daf588f5ac3c586cb2862ad" Jan 31 06:10:17 crc kubenswrapper[4712]: I0131 06:10:17.837582 4712 scope.go:117] "RemoveContainer" containerID="bec24cfcc57afb714b2a7efdb30b61f60854939b8395c6689f8807c799c1755f" Jan 31 06:10:17 crc kubenswrapper[4712]: I0131 06:10:17.861164 4712 scope.go:117] "RemoveContainer" containerID="3aa786383ad64c6d2b633d4ad53991df3fef3f6c8ee1fdb8f001b69aaf4f98d6" Jan 31 06:10:17 crc kubenswrapper[4712]: I0131 06:10:17.910107 4712 scope.go:117] "RemoveContainer" containerID="94a6b9d9c3e02ef9bc54f44658a31997f8d9641e3979d1fa57594dff9c223dcd" Jan 31 06:10:17 crc kubenswrapper[4712]: I0131 06:10:17.956949 4712 scope.go:117] "RemoveContainer" containerID="0e072e19229f70590e4d2e4a27fb2530ab526dd4f489af075125ce90f3ce3ada" Jan 31 06:10:17 crc kubenswrapper[4712]: I0131 06:10:17.978556 4712 scope.go:117] "RemoveContainer" containerID="956833f5d936e29243b8676d025075d9b3ab4ce0a134b2649a294772c9ab1045" Jan 31 06:10:17 crc kubenswrapper[4712]: I0131 06:10:17.998602 4712 scope.go:117] "RemoveContainer" containerID="40cb4bc04c82a7fc9e2d4f5f8cd6482a65a37c5d8c9afd580242c932bc276d4e" Jan 31 06:10:18 crc kubenswrapper[4712]: I0131 06:10:18.037359 4712 scope.go:117] "RemoveContainer" containerID="af1f623d801e4c4eb1203c9af66039086856a7a3ab7159460911bfae59536649" Jan 31 06:10:18 crc kubenswrapper[4712]: I0131 06:10:18.060638 4712 scope.go:117] "RemoveContainer" containerID="a47ee06fc8fead4f4e098e32c959ea6ba4e9d12300387f6c2b83372e66f2fa61" Jan 31 06:10:18 crc kubenswrapper[4712]: I0131 06:10:18.084871 4712 scope.go:117] "RemoveContainer" containerID="b92aee5d221fdcd20dc0d76dd0ceb4994e8f12904ac4c6ccbdbcd586c7db175f" Jan 31 06:10:18 crc kubenswrapper[4712]: I0131 06:10:18.111673 4712 scope.go:117] "RemoveContainer" containerID="bbb1a0f36ad3d917b5eaf0a7c8df3030210841050bff90066b61814c48b0e3b5" Jan 31 06:10:27 crc kubenswrapper[4712]: I0131 06:10:27.504753 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:10:27 crc kubenswrapper[4712]: E0131 06:10:27.505684 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:10:40 crc kubenswrapper[4712]: I0131 06:10:40.504813 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:10:40 crc kubenswrapper[4712]: E0131 06:10:40.505784 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:10:44 crc kubenswrapper[4712]: I0131 06:10:44.069008 4712 generic.go:334] "Generic (PLEG): container finished" podID="d61c9236-0514-4d46-b7a7-49f8d5e63685" containerID="a69d4a51555c64711113e4e101d6e2c350139700a4ca76cf81107863995a57e5" exitCode=0 Jan 31 06:10:44 crc kubenswrapper[4712]: I0131 06:10:44.069082 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" event={"ID":"d61c9236-0514-4d46-b7a7-49f8d5e63685","Type":"ContainerDied","Data":"a69d4a51555c64711113e4e101d6e2c350139700a4ca76cf81107863995a57e5"} Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.505393 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.592205 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxwph\" (UniqueName: \"kubernetes.io/projected/d61c9236-0514-4d46-b7a7-49f8d5e63685-kube-api-access-xxwph\") pod \"d61c9236-0514-4d46-b7a7-49f8d5e63685\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.592269 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-ssh-key-openstack-edpm-ipam\") pod \"d61c9236-0514-4d46-b7a7-49f8d5e63685\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.592351 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-bootstrap-combined-ca-bundle\") pod \"d61c9236-0514-4d46-b7a7-49f8d5e63685\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.592399 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-inventory\") pod \"d61c9236-0514-4d46-b7a7-49f8d5e63685\" (UID: \"d61c9236-0514-4d46-b7a7-49f8d5e63685\") " Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.598994 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "d61c9236-0514-4d46-b7a7-49f8d5e63685" (UID: "d61c9236-0514-4d46-b7a7-49f8d5e63685"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.600339 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d61c9236-0514-4d46-b7a7-49f8d5e63685-kube-api-access-xxwph" (OuterVolumeSpecName: "kube-api-access-xxwph") pod "d61c9236-0514-4d46-b7a7-49f8d5e63685" (UID: "d61c9236-0514-4d46-b7a7-49f8d5e63685"). InnerVolumeSpecName "kube-api-access-xxwph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.625064 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "d61c9236-0514-4d46-b7a7-49f8d5e63685" (UID: "d61c9236-0514-4d46-b7a7-49f8d5e63685"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.633366 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-inventory" (OuterVolumeSpecName: "inventory") pod "d61c9236-0514-4d46-b7a7-49f8d5e63685" (UID: "d61c9236-0514-4d46-b7a7-49f8d5e63685"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.696941 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxwph\" (UniqueName: \"kubernetes.io/projected/d61c9236-0514-4d46-b7a7-49f8d5e63685-kube-api-access-xxwph\") on node \"crc\" DevicePath \"\"" Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.696996 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.697012 4712 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:10:45 crc kubenswrapper[4712]: I0131 06:10:45.697024 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d61c9236-0514-4d46-b7a7-49f8d5e63685-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.090342 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" event={"ID":"d61c9236-0514-4d46-b7a7-49f8d5e63685","Type":"ContainerDied","Data":"cc0b0b1dc86a43b6d82ef1071cbe4fd49eef37439acb1d8f26a81c1d569c2ba8"} Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.090396 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc0b0b1dc86a43b6d82ef1071cbe4fd49eef37439acb1d8f26a81c1d569c2ba8" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.090424 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d9662" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.177315 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c"] Jan 31 06:10:46 crc kubenswrapper[4712]: E0131 06:10:46.177816 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cf85667-9fce-4569-815c-12c683426e92" containerName="extract-utilities" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.177841 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cf85667-9fce-4569-815c-12c683426e92" containerName="extract-utilities" Jan 31 06:10:46 crc kubenswrapper[4712]: E0131 06:10:46.177859 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cf85667-9fce-4569-815c-12c683426e92" containerName="extract-content" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.177869 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cf85667-9fce-4569-815c-12c683426e92" containerName="extract-content" Jan 31 06:10:46 crc kubenswrapper[4712]: E0131 06:10:46.177885 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cf85667-9fce-4569-815c-12c683426e92" containerName="registry-server" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.177893 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cf85667-9fce-4569-815c-12c683426e92" containerName="registry-server" Jan 31 06:10:46 crc kubenswrapper[4712]: E0131 06:10:46.177933 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d61c9236-0514-4d46-b7a7-49f8d5e63685" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.177940 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d61c9236-0514-4d46-b7a7-49f8d5e63685" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.178128 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d61c9236-0514-4d46-b7a7-49f8d5e63685" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.178160 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cf85667-9fce-4569-815c-12c683426e92" containerName="registry-server" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.179009 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.181349 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.185786 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.186691 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.187605 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.197284 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c"] Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.208879 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.209019 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9w6q\" (UniqueName: \"kubernetes.io/projected/d52a5e5f-6195-4acc-b30b-c872b19bbd10-kube-api-access-v9w6q\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.209052 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.311352 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.311469 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9w6q\" (UniqueName: \"kubernetes.io/projected/d52a5e5f-6195-4acc-b30b-c872b19bbd10-kube-api-access-v9w6q\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.311513 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.316928 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.322758 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.330739 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9w6q\" (UniqueName: \"kubernetes.io/projected/d52a5e5f-6195-4acc-b30b-c872b19bbd10-kube-api-access-v9w6q\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:46 crc kubenswrapper[4712]: I0131 06:10:46.498900 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:10:47 crc kubenswrapper[4712]: I0131 06:10:47.007658 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c"] Jan 31 06:10:47 crc kubenswrapper[4712]: I0131 06:10:47.100692 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" event={"ID":"d52a5e5f-6195-4acc-b30b-c872b19bbd10","Type":"ContainerStarted","Data":"4219789e512fee4054210fb6e5f239f5deac08452bd956f3bddc242ad3a90efc"} Jan 31 06:10:51 crc kubenswrapper[4712]: I0131 06:10:51.162900 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" event={"ID":"d52a5e5f-6195-4acc-b30b-c872b19bbd10","Type":"ContainerStarted","Data":"840a525961a4c4a0fe426cff527f01bafee007277573cdd103ff43afc4367b1f"} Jan 31 06:10:51 crc kubenswrapper[4712]: I0131 06:10:51.182030 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" podStartSLOduration=1.570866562 podStartE2EDuration="5.18200565s" podCreationTimestamp="2026-01-31 06:10:46 +0000 UTC" firstStartedPulling="2026-01-31 06:10:47.01178112 +0000 UTC m=+1913.105662961" lastFinishedPulling="2026-01-31 06:10:50.622920208 +0000 UTC m=+1916.716802049" observedRunningTime="2026-01-31 06:10:51.177229125 +0000 UTC m=+1917.271110986" watchObservedRunningTime="2026-01-31 06:10:51.18200565 +0000 UTC m=+1917.275887491" Jan 31 06:10:53 crc kubenswrapper[4712]: I0131 06:10:53.504660 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:10:53 crc 
kubenswrapper[4712]: E0131 06:10:53.505674 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:10:55 crc kubenswrapper[4712]: I0131 06:10:55.042553 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-mngsk"] Jan 31 06:10:55 crc kubenswrapper[4712]: I0131 06:10:55.055075 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-mngsk"] Jan 31 06:10:56 crc kubenswrapper[4712]: I0131 06:10:56.516146 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1df94e08-77d8-4734-9985-81bc69a91cf2" path="/var/lib/kubelet/pods/1df94e08-77d8-4734-9985-81bc69a91cf2/volumes" Jan 31 06:10:57 crc kubenswrapper[4712]: I0131 06:10:57.031712 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-qp52f"] Jan 31 06:10:57 crc kubenswrapper[4712]: I0131 06:10:57.040873 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-qp52f"] Jan 31 06:10:58 crc kubenswrapper[4712]: I0131 06:10:58.517198 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a59772f8-fb2b-4ccd-80e3-de90890503d9" path="/var/lib/kubelet/pods/a59772f8-fb2b-4ccd-80e3-de90890503d9/volumes" Jan 31 06:11:05 crc kubenswrapper[4712]: I0131 06:11:05.505091 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:11:05 crc kubenswrapper[4712]: E0131 06:11:05.506163 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:11:18 crc kubenswrapper[4712]: I0131 06:11:18.439455 4712 scope.go:117] "RemoveContainer" containerID="ee32e834fe986a5f8e2ce9c1b9142d868ea133db1b4431eef9092ca4a9998cf3" Jan 31 06:11:18 crc kubenswrapper[4712]: I0131 06:11:18.464678 4712 scope.go:117] "RemoveContainer" containerID="bc385aff64b8c56b41714eca2dbc2a8109dbbf2053ec525d5961a3dcdff7f6bb" Jan 31 06:11:18 crc kubenswrapper[4712]: I0131 06:11:18.506324 4712 scope.go:117] "RemoveContainer" containerID="82dc477ea41d1afefec9b15ce55e9d53bb61245bc13a203e4d408bc8d205ab81" Jan 31 06:11:18 crc kubenswrapper[4712]: I0131 06:11:18.507324 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:11:18 crc kubenswrapper[4712]: E0131 06:11:18.507920 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:11:18 crc kubenswrapper[4712]: I0131 06:11:18.578771 4712 
scope.go:117] "RemoveContainer" containerID="75936447e03c20aa958d4ccd66376c411ed81cf2f1f315a1718dce6e3f68f815" Jan 31 06:11:30 crc kubenswrapper[4712]: I0131 06:11:30.504408 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:11:30 crc kubenswrapper[4712]: E0131 06:11:30.505281 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:11:34 crc kubenswrapper[4712]: I0131 06:11:34.044185 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-6l275"] Jan 31 06:11:34 crc kubenswrapper[4712]: I0131 06:11:34.056468 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-6l275"] Jan 31 06:11:34 crc kubenswrapper[4712]: I0131 06:11:34.533188 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1368cce3-9cef-4215-bb30-e9b16399e5d6" path="/var/lib/kubelet/pods/1368cce3-9cef-4215-bb30-e9b16399e5d6/volumes" Jan 31 06:11:37 crc kubenswrapper[4712]: I0131 06:11:37.036491 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-jc5lg"] Jan 31 06:11:37 crc kubenswrapper[4712]: I0131 06:11:37.048322 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-jc5lg"] Jan 31 06:11:38 crc kubenswrapper[4712]: I0131 06:11:38.030472 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-mh8d8"] Jan 31 06:11:38 crc kubenswrapper[4712]: I0131 06:11:38.037764 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-mh8d8"] Jan 31 06:11:38 crc kubenswrapper[4712]: I0131 06:11:38.515649 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66188e51-a34f-43f3-b12f-ea74a367587f" path="/var/lib/kubelet/pods/66188e51-a34f-43f3-b12f-ea74a367587f/volumes" Jan 31 06:11:38 crc kubenswrapper[4712]: I0131 06:11:38.516313 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="830c0611-e4bf-4fae-96ed-d3a69bdff35f" path="/var/lib/kubelet/pods/830c0611-e4bf-4fae-96ed-d3a69bdff35f/volumes" Jan 31 06:11:42 crc kubenswrapper[4712]: I0131 06:11:42.504187 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:11:42 crc kubenswrapper[4712]: E0131 06:11:42.505290 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:11:53 crc kubenswrapper[4712]: I0131 06:11:53.505147 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:11:53 crc kubenswrapper[4712]: E0131 06:11:53.507290 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:12:01 crc kubenswrapper[4712]: I0131 06:12:01.046057 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-tp7gg"] Jan 31 06:12:01 crc kubenswrapper[4712]: I0131 06:12:01.053748 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-tp7gg"] Jan 31 06:12:02 crc kubenswrapper[4712]: I0131 06:12:02.517532 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29a76001-83c3-470c-aede-3fe832068688" path="/var/lib/kubelet/pods/29a76001-83c3-470c-aede-3fe832068688/volumes" Jan 31 06:12:06 crc kubenswrapper[4712]: I0131 06:12:06.504378 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:12:06 crc kubenswrapper[4712]: E0131 06:12:06.505360 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:12:12 crc kubenswrapper[4712]: I0131 06:12:12.058873 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-bp49d"] Jan 31 06:12:12 crc kubenswrapper[4712]: I0131 06:12:12.070589 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-bp49d"] Jan 31 06:12:12 crc kubenswrapper[4712]: I0131 06:12:12.516648 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f515caad-5449-4314-ba23-cc132eba7102" path="/var/lib/kubelet/pods/f515caad-5449-4314-ba23-cc132eba7102/volumes" Jan 31 06:12:18 crc kubenswrapper[4712]: I0131 06:12:18.655432 4712 scope.go:117] "RemoveContainer" containerID="2cbb289e82cc25f40631b03206a9a0c38750f66f67eb7c4d0888f37a4f0d24e3" Jan 31 06:12:18 crc kubenswrapper[4712]: I0131 06:12:18.795899 4712 scope.go:117] "RemoveContainer" containerID="873f85d491316a17cfbe60221421a4841b9f5ba1ff919757fb7cafff5cde4de2" Jan 31 06:12:18 crc kubenswrapper[4712]: I0131 06:12:18.836503 4712 scope.go:117] "RemoveContainer" containerID="5c299ce006082fecc5ad33df1f9ba1d2d6e0e13f17b30fe671d78fa56f59e00f" Jan 31 06:12:18 crc kubenswrapper[4712]: I0131 06:12:18.884878 4712 scope.go:117] "RemoveContainer" containerID="d0fd24ec0a30c3ccf1fa7b5a797d57803897051bab6d1a91f87c451dde8172f5" Jan 31 06:12:18 crc kubenswrapper[4712]: I0131 06:12:18.920766 4712 scope.go:117] "RemoveContainer" containerID="fab21e60b8618323bccecabf09432c2e453bbf57faf938e7fd6c0fec18f5d266" Jan 31 06:12:20 crc kubenswrapper[4712]: I0131 06:12:20.503871 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:12:20 crc kubenswrapper[4712]: E0131 06:12:20.504459 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:12:22 crc kubenswrapper[4712]: I0131 06:12:22.982807 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rkbcg"] Jan 31 06:12:22 crc kubenswrapper[4712]: I0131 06:12:22.985321 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:22 crc kubenswrapper[4712]: I0131 06:12:22.999852 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rkbcg"] Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.057136 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj6dd\" (UniqueName: \"kubernetes.io/projected/daa9ab9b-6737-4210-b4ba-24dfdaa53708-kube-api-access-sj6dd\") pod \"redhat-operators-rkbcg\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.057486 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-utilities\") pod \"redhat-operators-rkbcg\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.057582 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-catalog-content\") pod \"redhat-operators-rkbcg\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.160246 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj6dd\" (UniqueName: \"kubernetes.io/projected/daa9ab9b-6737-4210-b4ba-24dfdaa53708-kube-api-access-sj6dd\") pod \"redhat-operators-rkbcg\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.160320 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-utilities\") pod \"redhat-operators-rkbcg\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.160343 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-catalog-content\") pod \"redhat-operators-rkbcg\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.160904 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-catalog-content\") pod \"redhat-operators-rkbcg\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.161454 4712 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-utilities\") pod \"redhat-operators-rkbcg\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.185235 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj6dd\" (UniqueName: \"kubernetes.io/projected/daa9ab9b-6737-4210-b4ba-24dfdaa53708-kube-api-access-sj6dd\") pod \"redhat-operators-rkbcg\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.312903 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:23 crc kubenswrapper[4712]: I0131 06:12:23.893341 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rkbcg"] Jan 31 06:12:24 crc kubenswrapper[4712]: I0131 06:12:24.081557 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkbcg" event={"ID":"daa9ab9b-6737-4210-b4ba-24dfdaa53708","Type":"ContainerStarted","Data":"e6b860aa4710ca0f15ba66dcf5bff4e9bbf0f7dc813a9e1aa09b805487ec41c3"} Jan 31 06:12:25 crc kubenswrapper[4712]: I0131 06:12:25.091728 4712 generic.go:334] "Generic (PLEG): container finished" podID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerID="a082b65d3cf246abfb24b925e08fc01f0cd4f7ae6d329938f1f02ba013f83d79" exitCode=0 Jan 31 06:12:25 crc kubenswrapper[4712]: I0131 06:12:25.091809 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkbcg" event={"ID":"daa9ab9b-6737-4210-b4ba-24dfdaa53708","Type":"ContainerDied","Data":"a082b65d3cf246abfb24b925e08fc01f0cd4f7ae6d329938f1f02ba013f83d79"} Jan 31 06:12:27 crc kubenswrapper[4712]: I0131 06:12:27.111579 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkbcg" event={"ID":"daa9ab9b-6737-4210-b4ba-24dfdaa53708","Type":"ContainerStarted","Data":"75bd6d4d3907ea002517fac21e4cd12593e5b123abe94262485a6412a8249b24"} Jan 31 06:12:28 crc kubenswrapper[4712]: I0131 06:12:28.122114 4712 generic.go:334] "Generic (PLEG): container finished" podID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerID="75bd6d4d3907ea002517fac21e4cd12593e5b123abe94262485a6412a8249b24" exitCode=0 Jan 31 06:12:28 crc kubenswrapper[4712]: I0131 06:12:28.122203 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkbcg" event={"ID":"daa9ab9b-6737-4210-b4ba-24dfdaa53708","Type":"ContainerDied","Data":"75bd6d4d3907ea002517fac21e4cd12593e5b123abe94262485a6412a8249b24"} Jan 31 06:12:34 crc kubenswrapper[4712]: I0131 06:12:34.515665 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:12:34 crc kubenswrapper[4712]: E0131 06:12:34.516642 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:12:37 crc kubenswrapper[4712]: I0131 06:12:37.029064 4712 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-hkgvt"] Jan 31 06:12:37 crc kubenswrapper[4712]: I0131 06:12:37.039629 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-3898-account-create-update-xd6dr"] Jan 31 06:12:37 crc kubenswrapper[4712]: I0131 06:12:37.048675 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-3898-account-create-update-xd6dr"] Jan 31 06:12:37 crc kubenswrapper[4712]: I0131 06:12:37.056107 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-hkgvt"] Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.045993 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-wmdr2"] Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.060676 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-6ea2-account-create-update-sk5tw"] Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.069335 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-xjh9f"] Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.077066 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-4029-account-create-update-2kvgn"] Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.084190 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-wmdr2"] Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.091387 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-xjh9f"] Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.100330 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-4029-account-create-update-2kvgn"] Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.108436 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-6ea2-account-create-update-sk5tw"] Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.516667 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="653c7ebd-bdbe-4ef2-910c-67ab033d8aad" path="/var/lib/kubelet/pods/653c7ebd-bdbe-4ef2-910c-67ab033d8aad/volumes" Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.517505 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7431f59c-b102-4ea8-b8d4-f1d7f373af85" path="/var/lib/kubelet/pods/7431f59c-b102-4ea8-b8d4-f1d7f373af85/volumes" Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.518317 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9db4b3a3-a250-4150-8ee3-15d770bd611b" path="/var/lib/kubelet/pods/9db4b3a3-a250-4150-8ee3-15d770bd611b/volumes" Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.519143 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3d1772f-7207-41bf-91bd-8d46663fecc6" path="/var/lib/kubelet/pods/d3d1772f-7207-41bf-91bd-8d46663fecc6/volumes" Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.520612 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5b506bd-07dd-439e-add3-1bd487999c1a" path="/var/lib/kubelet/pods/e5b506bd-07dd-439e-add3-1bd487999c1a/volumes" Jan 31 06:12:38 crc kubenswrapper[4712]: I0131 06:12:38.521262 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb27f5c1-d0a0-4aa7-97ee-e271d63a722d" path="/var/lib/kubelet/pods/eb27f5c1-d0a0-4aa7-97ee-e271d63a722d/volumes" Jan 31 06:12:39 crc kubenswrapper[4712]: I0131 06:12:39.217497 4712 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkbcg" event={"ID":"daa9ab9b-6737-4210-b4ba-24dfdaa53708","Type":"ContainerStarted","Data":"8ad7b84e6d38cb2fdb08709e46002e498f9496afd71dcf6eed51f6d7ced3ca34"} Jan 31 06:12:39 crc kubenswrapper[4712]: I0131 06:12:39.240906 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rkbcg" podStartSLOduration=3.922781005 podStartE2EDuration="17.240881394s" podCreationTimestamp="2026-01-31 06:12:22 +0000 UTC" firstStartedPulling="2026-01-31 06:12:25.09381519 +0000 UTC m=+2011.187697021" lastFinishedPulling="2026-01-31 06:12:38.411915569 +0000 UTC m=+2024.505797410" observedRunningTime="2026-01-31 06:12:39.236027366 +0000 UTC m=+2025.329909217" watchObservedRunningTime="2026-01-31 06:12:39.240881394 +0000 UTC m=+2025.334763235" Jan 31 06:12:43 crc kubenswrapper[4712]: I0131 06:12:43.313966 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:43 crc kubenswrapper[4712]: I0131 06:12:43.314407 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:44 crc kubenswrapper[4712]: I0131 06:12:44.359079 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rkbcg" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerName="registry-server" probeResult="failure" output=< Jan 31 06:12:44 crc kubenswrapper[4712]: timeout: failed to connect service ":50051" within 1s Jan 31 06:12:44 crc kubenswrapper[4712]: > Jan 31 06:12:46 crc kubenswrapper[4712]: I0131 06:12:46.307386 4712 generic.go:334] "Generic (PLEG): container finished" podID="d52a5e5f-6195-4acc-b30b-c872b19bbd10" containerID="840a525961a4c4a0fe426cff527f01bafee007277573cdd103ff43afc4367b1f" exitCode=0 Jan 31 06:12:46 crc kubenswrapper[4712]: I0131 06:12:46.307495 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" event={"ID":"d52a5e5f-6195-4acc-b30b-c872b19bbd10","Type":"ContainerDied","Data":"840a525961a4c4a0fe426cff527f01bafee007277573cdd103ff43afc4367b1f"} Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.705824 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.828354 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-ssh-key-openstack-edpm-ipam\") pod \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.828625 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-inventory\") pod \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.828668 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9w6q\" (UniqueName: \"kubernetes.io/projected/d52a5e5f-6195-4acc-b30b-c872b19bbd10-kube-api-access-v9w6q\") pod \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\" (UID: \"d52a5e5f-6195-4acc-b30b-c872b19bbd10\") " Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.845923 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d52a5e5f-6195-4acc-b30b-c872b19bbd10-kube-api-access-v9w6q" (OuterVolumeSpecName: "kube-api-access-v9w6q") pod "d52a5e5f-6195-4acc-b30b-c872b19bbd10" (UID: "d52a5e5f-6195-4acc-b30b-c872b19bbd10"). InnerVolumeSpecName "kube-api-access-v9w6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.864753 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "d52a5e5f-6195-4acc-b30b-c872b19bbd10" (UID: "d52a5e5f-6195-4acc-b30b-c872b19bbd10"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.866418 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-inventory" (OuterVolumeSpecName: "inventory") pod "d52a5e5f-6195-4acc-b30b-c872b19bbd10" (UID: "d52a5e5f-6195-4acc-b30b-c872b19bbd10"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.931639 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.931849 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9w6q\" (UniqueName: \"kubernetes.io/projected/d52a5e5f-6195-4acc-b30b-c872b19bbd10-kube-api-access-v9w6q\") on node \"crc\" DevicePath \"\"" Jan 31 06:12:47 crc kubenswrapper[4712]: I0131 06:12:47.931911 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d52a5e5f-6195-4acc-b30b-c872b19bbd10-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.326590 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" event={"ID":"d52a5e5f-6195-4acc-b30b-c872b19bbd10","Type":"ContainerDied","Data":"4219789e512fee4054210fb6e5f239f5deac08452bd956f3bddc242ad3a90efc"} Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.326638 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.326637 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4219789e512fee4054210fb6e5f239f5deac08452bd956f3bddc242ad3a90efc" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.408239 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6"] Jan 31 06:12:48 crc kubenswrapper[4712]: E0131 06:12:48.408760 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52a5e5f-6195-4acc-b30b-c872b19bbd10" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.408783 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52a5e5f-6195-4acc-b30b-c872b19bbd10" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.408991 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d52a5e5f-6195-4acc-b30b-c872b19bbd10" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.409827 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.412263 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.416382 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.416596 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.419708 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.422289 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6"] Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.442469 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.442536 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4mnh\" (UniqueName: \"kubernetes.io/projected/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-kube-api-access-b4mnh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.442683 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.545600 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.545976 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.546124 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4mnh\" (UniqueName: 
\"kubernetes.io/projected/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-kube-api-access-b4mnh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.557366 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.558507 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.566731 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4mnh\" (UniqueName: \"kubernetes.io/projected/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-kube-api-access-b4mnh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:48 crc kubenswrapper[4712]: I0131 06:12:48.730392 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:12:49 crc kubenswrapper[4712]: I0131 06:12:49.295398 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6"] Jan 31 06:12:49 crc kubenswrapper[4712]: I0131 06:12:49.300771 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 06:12:49 crc kubenswrapper[4712]: I0131 06:12:49.337092 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" event={"ID":"033a2ac5-1d6e-4c75-9792-d54b4da7ef85","Type":"ContainerStarted","Data":"76b0543755721966d061d361df463bbdd36e681a71319454ab7a83981117dae9"} Jan 31 06:12:49 crc kubenswrapper[4712]: I0131 06:12:49.504621 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453" Jan 31 06:12:51 crc kubenswrapper[4712]: I0131 06:12:51.362961 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"916ac75ea8257b89462caa7aff53970687a07ffa908afaf5f0f04ef8e035aec3"} Jan 31 06:12:52 crc kubenswrapper[4712]: I0131 06:12:52.375192 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" event={"ID":"033a2ac5-1d6e-4c75-9792-d54b4da7ef85","Type":"ContainerStarted","Data":"250505abdae3af91c496db8c2189adbf9630c2d96554df17267b8c2a56f5626c"} Jan 31 06:12:52 crc kubenswrapper[4712]: I0131 06:12:52.402841 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" podStartSLOduration=2.5729241480000002 podStartE2EDuration="4.402817152s" podCreationTimestamp="2026-01-31 06:12:48 +0000 UTC" firstStartedPulling="2026-01-31 06:12:49.300516269 +0000 UTC m=+2035.394398110" lastFinishedPulling="2026-01-31 06:12:51.130409273 +0000 UTC m=+2037.224291114" observedRunningTime="2026-01-31 06:12:52.399668076 +0000 UTC m=+2038.493549937" watchObservedRunningTime="2026-01-31 06:12:52.402817152 +0000 UTC m=+2038.496698983" Jan 31 06:12:53 crc kubenswrapper[4712]: I0131 06:12:53.372096 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:53 crc kubenswrapper[4712]: I0131 06:12:53.425694 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:53 crc kubenswrapper[4712]: I0131 06:12:53.616218 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rkbcg"] Jan 31 06:12:54 crc kubenswrapper[4712]: I0131 06:12:54.391610 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rkbcg" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerName="registry-server" containerID="cri-o://8ad7b84e6d38cb2fdb08709e46002e498f9496afd71dcf6eed51f6d7ced3ca34" gracePeriod=2 Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.405096 4712 generic.go:334] "Generic (PLEG): container finished" podID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerID="8ad7b84e6d38cb2fdb08709e46002e498f9496afd71dcf6eed51f6d7ced3ca34" exitCode=0 Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.405197 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkbcg" event={"ID":"daa9ab9b-6737-4210-b4ba-24dfdaa53708","Type":"ContainerDied","Data":"8ad7b84e6d38cb2fdb08709e46002e498f9496afd71dcf6eed51f6d7ced3ca34"} Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.405654 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkbcg" event={"ID":"daa9ab9b-6737-4210-b4ba-24dfdaa53708","Type":"ContainerDied","Data":"e6b860aa4710ca0f15ba66dcf5bff4e9bbf0f7dc813a9e1aa09b805487ec41c3"} Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.405682 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6b860aa4710ca0f15ba66dcf5bff4e9bbf0f7dc813a9e1aa09b805487ec41c3" Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.472906 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.513416 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj6dd\" (UniqueName: \"kubernetes.io/projected/daa9ab9b-6737-4210-b4ba-24dfdaa53708-kube-api-access-sj6dd\") pod \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.513482 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-catalog-content\") pod \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.513587 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-utilities\") pod \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\" (UID: \"daa9ab9b-6737-4210-b4ba-24dfdaa53708\") " Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.515041 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-utilities" (OuterVolumeSpecName: "utilities") pod "daa9ab9b-6737-4210-b4ba-24dfdaa53708" (UID: "daa9ab9b-6737-4210-b4ba-24dfdaa53708"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.519738 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daa9ab9b-6737-4210-b4ba-24dfdaa53708-kube-api-access-sj6dd" (OuterVolumeSpecName: "kube-api-access-sj6dd") pod "daa9ab9b-6737-4210-b4ba-24dfdaa53708" (UID: "daa9ab9b-6737-4210-b4ba-24dfdaa53708"). InnerVolumeSpecName "kube-api-access-sj6dd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.615104 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.615522 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj6dd\" (UniqueName: \"kubernetes.io/projected/daa9ab9b-6737-4210-b4ba-24dfdaa53708-kube-api-access-sj6dd\") on node \"crc\" DevicePath \"\"" Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.649194 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "daa9ab9b-6737-4210-b4ba-24dfdaa53708" (UID: "daa9ab9b-6737-4210-b4ba-24dfdaa53708"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:12:55 crc kubenswrapper[4712]: I0131 06:12:55.717527 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/daa9ab9b-6737-4210-b4ba-24dfdaa53708-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:12:56 crc kubenswrapper[4712]: I0131 06:12:56.414142 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rkbcg" Jan 31 06:12:56 crc kubenswrapper[4712]: I0131 06:12:56.452871 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rkbcg"] Jan 31 06:12:56 crc kubenswrapper[4712]: I0131 06:12:56.466525 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rkbcg"] Jan 31 06:12:56 crc kubenswrapper[4712]: I0131 06:12:56.520151 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" path="/var/lib/kubelet/pods/daa9ab9b-6737-4210-b4ba-24dfdaa53708/volumes" Jan 31 06:13:19 crc kubenswrapper[4712]: I0131 06:13:19.106258 4712 scope.go:117] "RemoveContainer" containerID="99fa90bfc3b624390710250bf81efaec1774c15ef3534b5b8299005dbccc4f26" Jan 31 06:13:19 crc kubenswrapper[4712]: I0131 06:13:19.132411 4712 scope.go:117] "RemoveContainer" containerID="b9aba50b03f7db3a75e805a701552877dc0d8050d83b04d7ba13b5759bb8593a" Jan 31 06:13:19 crc kubenswrapper[4712]: I0131 06:13:19.178510 4712 scope.go:117] "RemoveContainer" containerID="242851c1a8694e51fd08ab74c666374d23d21aeb558c54f66421a86aacf72e83" Jan 31 06:13:19 crc kubenswrapper[4712]: I0131 06:13:19.224089 4712 scope.go:117] "RemoveContainer" containerID="e74f9b430434fb24908ea766dd96c0d1ac4614ccd088c89f01542fc4ed13e8f0" Jan 31 06:13:19 crc kubenswrapper[4712]: I0131 06:13:19.345809 4712 scope.go:117] "RemoveContainer" containerID="e63b6d7a57ec4cfe71ab4b2e5eafea9f045af81c5b18f77ed541fbef1cb94cd3" Jan 31 06:13:19 crc kubenswrapper[4712]: I0131 06:13:19.464955 4712 scope.go:117] "RemoveContainer" containerID="caf65a3c5396b3e688e58ab1c721f8117eaf4313f391c99fe95e9b27a818229d" Jan 31 06:13:30 crc kubenswrapper[4712]: I0131 06:13:30.044412 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l9r99"] Jan 31 06:13:30 crc kubenswrapper[4712]: I0131 06:13:30.054263 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l9r99"] Jan 31 06:13:30 crc kubenswrapper[4712]: I0131 06:13:30.515509 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aedf7767-82ba-4f3b-a34e-456f148922a4" path="/var/lib/kubelet/pods/aedf7767-82ba-4f3b-a34e-456f148922a4/volumes" Jan 31 06:13:50 crc kubenswrapper[4712]: I0131 06:13:50.044906 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-jpj94"] Jan 31 06:13:50 crc kubenswrapper[4712]: I0131 06:13:50.055935 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-jpj94"] Jan 31 06:13:50 crc kubenswrapper[4712]: I0131 06:13:50.514594 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb6bb4ae-9226-40ca-8117-3b62a4b91261" path="/var/lib/kubelet/pods/bb6bb4ae-9226-40ca-8117-3b62a4b91261/volumes" Jan 31 06:13:54 crc kubenswrapper[4712]: I0131 06:13:54.045533 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kzkg5"] Jan 31 06:13:54 crc kubenswrapper[4712]: I0131 06:13:54.059883 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-kzkg5"] Jan 31 06:13:54 crc kubenswrapper[4712]: I0131 06:13:54.520268 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d484669e-304d-4389-b015-9479aadf2675" path="/var/lib/kubelet/pods/d484669e-304d-4389-b015-9479aadf2675/volumes" Jan 31 06:14:06 crc kubenswrapper[4712]: I0131 
06:14:06.132028 4712 generic.go:334] "Generic (PLEG): container finished" podID="033a2ac5-1d6e-4c75-9792-d54b4da7ef85" containerID="250505abdae3af91c496db8c2189adbf9630c2d96554df17267b8c2a56f5626c" exitCode=0 Jan 31 06:14:06 crc kubenswrapper[4712]: I0131 06:14:06.132104 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" event={"ID":"033a2ac5-1d6e-4c75-9792-d54b4da7ef85","Type":"ContainerDied","Data":"250505abdae3af91c496db8c2189adbf9630c2d96554df17267b8c2a56f5626c"} Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.571978 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.703528 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-inventory\") pod \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.703665 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4mnh\" (UniqueName: \"kubernetes.io/projected/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-kube-api-access-b4mnh\") pod \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.703791 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-ssh-key-openstack-edpm-ipam\") pod \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\" (UID: \"033a2ac5-1d6e-4c75-9792-d54b4da7ef85\") " Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.712515 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-kube-api-access-b4mnh" (OuterVolumeSpecName: "kube-api-access-b4mnh") pod "033a2ac5-1d6e-4c75-9792-d54b4da7ef85" (UID: "033a2ac5-1d6e-4c75-9792-d54b4da7ef85"). InnerVolumeSpecName "kube-api-access-b4mnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.738329 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "033a2ac5-1d6e-4c75-9792-d54b4da7ef85" (UID: "033a2ac5-1d6e-4c75-9792-d54b4da7ef85"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.741734 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-inventory" (OuterVolumeSpecName: "inventory") pod "033a2ac5-1d6e-4c75-9792-d54b4da7ef85" (UID: "033a2ac5-1d6e-4c75-9792-d54b4da7ef85"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.806593 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.808336 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4mnh\" (UniqueName: \"kubernetes.io/projected/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-kube-api-access-b4mnh\") on node \"crc\" DevicePath \"\"" Jan 31 06:14:07 crc kubenswrapper[4712]: I0131 06:14:07.808369 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/033a2ac5-1d6e-4c75-9792-d54b4da7ef85-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.149896 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" event={"ID":"033a2ac5-1d6e-4c75-9792-d54b4da7ef85","Type":"ContainerDied","Data":"76b0543755721966d061d361df463bbdd36e681a71319454ab7a83981117dae9"} Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.150324 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76b0543755721966d061d361df463bbdd36e681a71319454ab7a83981117dae9" Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.149953 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6" Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.229541 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"] Jan 31 06:14:08 crc kubenswrapper[4712]: E0131 06:14:08.229941 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerName="registry-server" Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.229968 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerName="registry-server" Jan 31 06:14:08 crc kubenswrapper[4712]: E0131 06:14:08.230004 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerName="extract-content" Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.230010 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerName="extract-content" Jan 31 06:14:08 crc kubenswrapper[4712]: E0131 06:14:08.230022 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="033a2ac5-1d6e-4c75-9792-d54b4da7ef85" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.230031 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="033a2ac5-1d6e-4c75-9792-d54b4da7ef85" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jan 31 06:14:08 crc kubenswrapper[4712]: E0131 06:14:08.230045 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerName="extract-utilities" Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.230051 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerName="extract-utilities" Jan 31 06:14:08 crc 
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.230236 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="daa9ab9b-6737-4210-b4ba-24dfdaa53708" containerName="registry-server"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.230269 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="033a2ac5-1d6e-4c75-9792-d54b4da7ef85" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.230884 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.232845 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.232845 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.233051 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.236859 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.246975 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"]
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.322990 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b462n\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.323363 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b462n\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.323494 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbc69\" (UniqueName: \"kubernetes.io/projected/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-kube-api-access-kbc69\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b462n\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.425902 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b462n\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.425988 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b462n\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.426043 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbc69\" (UniqueName: \"kubernetes.io/projected/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-kube-api-access-kbc69\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b462n\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.430534 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b462n\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.430537 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b462n\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.444132 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbc69\" (UniqueName: \"kubernetes.io/projected/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-kube-api-access-kbc69\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b462n\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:08 crc kubenswrapper[4712]: I0131 06:14:08.550371 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:09 crc kubenswrapper[4712]: I0131 06:14:09.142349 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"]
Jan 31 06:14:09 crc kubenswrapper[4712]: W0131 06:14:09.145500 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55c71843_8f9f_4f1b_904d_e05dc4a2ea25.slice/crio-8f2e75315c175729c1e830748bbea87d666f5f245a018f1bb09084c108eee74e WatchSource:0}: Error finding container 8f2e75315c175729c1e830748bbea87d666f5f245a018f1bb09084c108eee74e: Status 404 returned error can't find the container with id 8f2e75315c175729c1e830748bbea87d666f5f245a018f1bb09084c108eee74e
Jan 31 06:14:09 crc kubenswrapper[4712]: I0131 06:14:09.189281 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n" event={"ID":"55c71843-8f9f-4f1b-904d-e05dc4a2ea25","Type":"ContainerStarted","Data":"8f2e75315c175729c1e830748bbea87d666f5f245a018f1bb09084c108eee74e"}
Jan 31 06:14:10 crc kubenswrapper[4712]: I0131 06:14:10.199366 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n" event={"ID":"55c71843-8f9f-4f1b-904d-e05dc4a2ea25","Type":"ContainerStarted","Data":"0c33cb425f3361186248ffb24edcaf41cff287da900b78dbdfa14ec6a6b44843"}
Jan 31 06:14:15 crc kubenswrapper[4712]: I0131 06:14:15.241926 4712 generic.go:334] "Generic (PLEG): container finished" podID="55c71843-8f9f-4f1b-904d-e05dc4a2ea25" containerID="0c33cb425f3361186248ffb24edcaf41cff287da900b78dbdfa14ec6a6b44843" exitCode=0
Jan 31 06:14:15 crc kubenswrapper[4712]: I0131 06:14:15.242030 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n" event={"ID":"55c71843-8f9f-4f1b-904d-e05dc4a2ea25","Type":"ContainerDied","Data":"0c33cb425f3361186248ffb24edcaf41cff287da900b78dbdfa14ec6a6b44843"}
Jan 31 06:14:16 crc kubenswrapper[4712]: I0131 06:14:16.686825 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n"
Jan 31 06:14:16 crc kubenswrapper[4712]: I0131 06:14:16.812495 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbc69\" (UniqueName: \"kubernetes.io/projected/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-kube-api-access-kbc69\") pod \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") "
Jan 31 06:14:16 crc kubenswrapper[4712]: I0131 06:14:16.812651 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-ssh-key-openstack-edpm-ipam\") pod \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") "
Jan 31 06:14:16 crc kubenswrapper[4712]: I0131 06:14:16.812769 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-inventory\") pod \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\" (UID: \"55c71843-8f9f-4f1b-904d-e05dc4a2ea25\") "
Jan 31 06:14:16 crc kubenswrapper[4712]: I0131 06:14:16.835918 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-kube-api-access-kbc69" (OuterVolumeSpecName: "kube-api-access-kbc69") pod "55c71843-8f9f-4f1b-904d-e05dc4a2ea25" (UID: "55c71843-8f9f-4f1b-904d-e05dc4a2ea25"). InnerVolumeSpecName "kube-api-access-kbc69". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:14:16 crc kubenswrapper[4712]: I0131 06:14:16.915322 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-inventory" (OuterVolumeSpecName: "inventory") pod "55c71843-8f9f-4f1b-904d-e05dc4a2ea25" (UID: "55c71843-8f9f-4f1b-904d-e05dc4a2ea25"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:14:16 crc kubenswrapper[4712]: I0131 06:14:16.915390 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbc69\" (UniqueName: \"kubernetes.io/projected/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-kube-api-access-kbc69\") on node \"crc\" DevicePath \"\""
Jan 31 06:14:16 crc kubenswrapper[4712]: I0131 06:14:16.943287 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "55c71843-8f9f-4f1b-904d-e05dc4a2ea25" (UID: "55c71843-8f9f-4f1b-904d-e05dc4a2ea25"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.017130 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.017825 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55c71843-8f9f-4f1b-904d-e05dc4a2ea25-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.263623 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n" event={"ID":"55c71843-8f9f-4f1b-904d-e05dc4a2ea25","Type":"ContainerDied","Data":"8f2e75315c175729c1e830748bbea87d666f5f245a018f1bb09084c108eee74e"} Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.263668 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f2e75315c175729c1e830748bbea87d666f5f245a018f1bb09084c108eee74e" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.263668 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b462n" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.363591 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j"] Jan 31 06:14:17 crc kubenswrapper[4712]: E0131 06:14:17.364344 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c71843-8f9f-4f1b-904d-e05dc4a2ea25" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.364448 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c71843-8f9f-4f1b-904d-e05dc4a2ea25" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.364815 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="55c71843-8f9f-4f1b-904d-e05dc4a2ea25" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.365785 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.371589 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.371931 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.372121 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.372316 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.381908 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j"] Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.432039 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fh9m\" (UniqueName: \"kubernetes.io/projected/a7836756-e240-4f18-b3cc-f820d8dd026d-kube-api-access-6fh9m\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-f6t8j\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.432095 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-f6t8j\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.432135 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-f6t8j\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.534479 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-f6t8j\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.534539 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fh9m\" (UniqueName: \"kubernetes.io/projected/a7836756-e240-4f18-b3cc-f820d8dd026d-kube-api-access-6fh9m\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-f6t8j\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.534614 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-ssh-key-openstack-edpm-ipam\") pod 
\"install-os-edpm-deployment-openstack-edpm-ipam-f6t8j\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.541679 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-f6t8j\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.541798 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-f6t8j\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.551886 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fh9m\" (UniqueName: \"kubernetes.io/projected/a7836756-e240-4f18-b3cc-f820d8dd026d-kube-api-access-6fh9m\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-f6t8j\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:17 crc kubenswrapper[4712]: I0131 06:14:17.690318 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:18 crc kubenswrapper[4712]: I0131 06:14:18.272402 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j"] Jan 31 06:14:19 crc kubenswrapper[4712]: I0131 06:14:19.281520 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" event={"ID":"a7836756-e240-4f18-b3cc-f820d8dd026d","Type":"ContainerStarted","Data":"af0fd41f5e7c8d792a6cfcfb1756003d3bbc19d886722e719481338967c637f7"} Jan 31 06:14:19 crc kubenswrapper[4712]: I0131 06:14:19.282241 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" event={"ID":"a7836756-e240-4f18-b3cc-f820d8dd026d","Type":"ContainerStarted","Data":"6856bd9fcf8b26794f76247bb50110809a566a1195fc42478dec83dd04da10f8"} Jan 31 06:14:19 crc kubenswrapper[4712]: I0131 06:14:19.310536 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" podStartSLOduration=1.6543305560000001 podStartE2EDuration="2.310508084s" podCreationTimestamp="2026-01-31 06:14:17 +0000 UTC" firstStartedPulling="2026-01-31 06:14:18.283138979 +0000 UTC m=+2124.377020820" lastFinishedPulling="2026-01-31 06:14:18.939316497 +0000 UTC m=+2125.033198348" observedRunningTime="2026-01-31 06:14:19.299969152 +0000 UTC m=+2125.393850993" watchObservedRunningTime="2026-01-31 06:14:19.310508084 +0000 UTC m=+2125.404389925" Jan 31 06:14:19 crc kubenswrapper[4712]: I0131 06:14:19.768011 4712 scope.go:117] "RemoveContainer" containerID="ed9fddb4ead999a50964b5995e1584ab28d56e08fa42b77fef80229d6e3da572" Jan 31 06:14:19 crc kubenswrapper[4712]: I0131 06:14:19.817116 4712 scope.go:117] "RemoveContainer" 
containerID="f63a07224856ba1ec27e2852b839aa1d3077fe9b49c33f31462652de2df3864b" Jan 31 06:14:19 crc kubenswrapper[4712]: I0131 06:14:19.862586 4712 scope.go:117] "RemoveContainer" containerID="3681a2c1b37e8bd0ff64b91199e6c2c709faee255dc78a48536bf8904eb1a194" Jan 31 06:14:35 crc kubenswrapper[4712]: I0131 06:14:35.042311 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-r72q5"] Jan 31 06:14:35 crc kubenswrapper[4712]: I0131 06:14:35.049904 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-r72q5"] Jan 31 06:14:36 crc kubenswrapper[4712]: I0131 06:14:36.516241 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b012712b-f57a-4334-aa1c-0264aedf8e5c" path="/var/lib/kubelet/pods/b012712b-f57a-4334-aa1c-0264aedf8e5c/volumes" Jan 31 06:14:55 crc kubenswrapper[4712]: I0131 06:14:55.614336 4712 generic.go:334] "Generic (PLEG): container finished" podID="a7836756-e240-4f18-b3cc-f820d8dd026d" containerID="af0fd41f5e7c8d792a6cfcfb1756003d3bbc19d886722e719481338967c637f7" exitCode=0 Jan 31 06:14:55 crc kubenswrapper[4712]: I0131 06:14:55.614435 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" event={"ID":"a7836756-e240-4f18-b3cc-f820d8dd026d","Type":"ContainerDied","Data":"af0fd41f5e7c8d792a6cfcfb1756003d3bbc19d886722e719481338967c637f7"} Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.058889 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.239900 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fh9m\" (UniqueName: \"kubernetes.io/projected/a7836756-e240-4f18-b3cc-f820d8dd026d-kube-api-access-6fh9m\") pod \"a7836756-e240-4f18-b3cc-f820d8dd026d\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.239995 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-ssh-key-openstack-edpm-ipam\") pod \"a7836756-e240-4f18-b3cc-f820d8dd026d\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.240232 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-inventory\") pod \"a7836756-e240-4f18-b3cc-f820d8dd026d\" (UID: \"a7836756-e240-4f18-b3cc-f820d8dd026d\") " Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.247672 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7836756-e240-4f18-b3cc-f820d8dd026d-kube-api-access-6fh9m" (OuterVolumeSpecName: "kube-api-access-6fh9m") pod "a7836756-e240-4f18-b3cc-f820d8dd026d" (UID: "a7836756-e240-4f18-b3cc-f820d8dd026d"). InnerVolumeSpecName "kube-api-access-6fh9m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.272834 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "a7836756-e240-4f18-b3cc-f820d8dd026d" (UID: "a7836756-e240-4f18-b3cc-f820d8dd026d"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.286403 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-inventory" (OuterVolumeSpecName: "inventory") pod "a7836756-e240-4f18-b3cc-f820d8dd026d" (UID: "a7836756-e240-4f18-b3cc-f820d8dd026d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.342296 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fh9m\" (UniqueName: \"kubernetes.io/projected/a7836756-e240-4f18-b3cc-f820d8dd026d-kube-api-access-6fh9m\") on node \"crc\" DevicePath \"\"" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.342357 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.342371 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7836756-e240-4f18-b3cc-f820d8dd026d-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.634387 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" event={"ID":"a7836756-e240-4f18-b3cc-f820d8dd026d","Type":"ContainerDied","Data":"6856bd9fcf8b26794f76247bb50110809a566a1195fc42478dec83dd04da10f8"} Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.634444 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6856bd9fcf8b26794f76247bb50110809a566a1195fc42478dec83dd04da10f8" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.634510 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-f6t8j" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.735296 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp"] Jan 31 06:14:57 crc kubenswrapper[4712]: E0131 06:14:57.735714 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7836756-e240-4f18-b3cc-f820d8dd026d" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.735733 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7836756-e240-4f18-b3cc-f820d8dd026d" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.735949 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7836756-e240-4f18-b3cc-f820d8dd026d" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.736677 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.739226 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.739253 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.739403 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.739441 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.749856 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp"] Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.751055 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x74d\" (UniqueName: \"kubernetes.io/projected/8a24ba49-5360-4c66-a06d-36f6915384a9-kube-api-access-2x74d\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.751113 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.751279 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.854106 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x74d\" (UniqueName: \"kubernetes.io/projected/8a24ba49-5360-4c66-a06d-36f6915384a9-kube-api-access-2x74d\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.854185 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.854246 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.859143 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.859139 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:57 crc kubenswrapper[4712]: I0131 06:14:57.873597 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x74d\" (UniqueName: \"kubernetes.io/projected/8a24ba49-5360-4c66-a06d-36f6915384a9-kube-api-access-2x74d\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:58 crc kubenswrapper[4712]: I0131 06:14:58.053152 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" Jan 31 06:14:58 crc kubenswrapper[4712]: I0131 06:14:58.635241 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp"] Jan 31 06:14:58 crc kubenswrapper[4712]: I0131 06:14:58.653071 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" event={"ID":"8a24ba49-5360-4c66-a06d-36f6915384a9","Type":"ContainerStarted","Data":"9d87c21f89f4399a06f4410eab48ac47ff47619648696f1b2087e10dd5b0f3bb"} Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.133190 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x"] Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.135506 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.139746 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.140494 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.150094 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x"] Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.311868 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b47ae7f-713f-4f37-af6c-112e264c6ef7-secret-volume\") pod \"collect-profiles-29497335-f788x\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.311945 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b47ae7f-713f-4f37-af6c-112e264c6ef7-config-volume\") pod \"collect-profiles-29497335-f788x\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.311970 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jj24\" (UniqueName: \"kubernetes.io/projected/0b47ae7f-713f-4f37-af6c-112e264c6ef7-kube-api-access-4jj24\") pod \"collect-profiles-29497335-f788x\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.413391 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b47ae7f-713f-4f37-af6c-112e264c6ef7-config-volume\") pod \"collect-profiles-29497335-f788x\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.413712 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jj24\" (UniqueName: \"kubernetes.io/projected/0b47ae7f-713f-4f37-af6c-112e264c6ef7-kube-api-access-4jj24\") pod \"collect-profiles-29497335-f788x\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.413913 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b47ae7f-713f-4f37-af6c-112e264c6ef7-secret-volume\") pod \"collect-profiles-29497335-f788x\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.414550 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b47ae7f-713f-4f37-af6c-112e264c6ef7-config-volume\") pod 
\"collect-profiles-29497335-f788x\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.425597 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b47ae7f-713f-4f37-af6c-112e264c6ef7-secret-volume\") pod \"collect-profiles-29497335-f788x\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.465932 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jj24\" (UniqueName: \"kubernetes.io/projected/0b47ae7f-713f-4f37-af6c-112e264c6ef7-kube-api-access-4jj24\") pod \"collect-profiles-29497335-f788x\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.679075 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" event={"ID":"8a24ba49-5360-4c66-a06d-36f6915384a9","Type":"ContainerStarted","Data":"ef58403f2193c87cd86821d03d2dcb8de010e56538a18ba00cb1bc46efab8c12"} Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.705769 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" podStartSLOduration=2.9694064 podStartE2EDuration="3.705743985s" podCreationTimestamp="2026-01-31 06:14:57 +0000 UTC" firstStartedPulling="2026-01-31 06:14:58.632950241 +0000 UTC m=+2164.726832082" lastFinishedPulling="2026-01-31 06:14:59.369287826 +0000 UTC m=+2165.463169667" observedRunningTime="2026-01-31 06:15:00.701585109 +0000 UTC m=+2166.795466970" watchObservedRunningTime="2026-01-31 06:15:00.705743985 +0000 UTC m=+2166.799625826" Jan 31 06:15:00 crc kubenswrapper[4712]: I0131 06:15:00.758872 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:01 crc kubenswrapper[4712]: I0131 06:15:01.241297 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x"] Jan 31 06:15:01 crc kubenswrapper[4712]: W0131 06:15:01.251132 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b47ae7f_713f_4f37_af6c_112e264c6ef7.slice/crio-2c0f80a6a6ec7f8e6006f610571720a64940da7d5351d732e5c6078eb85e0be0 WatchSource:0}: Error finding container 2c0f80a6a6ec7f8e6006f610571720a64940da7d5351d732e5c6078eb85e0be0: Status 404 returned error can't find the container with id 2c0f80a6a6ec7f8e6006f610571720a64940da7d5351d732e5c6078eb85e0be0 Jan 31 06:15:01 crc kubenswrapper[4712]: I0131 06:15:01.691571 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" event={"ID":"0b47ae7f-713f-4f37-af6c-112e264c6ef7","Type":"ContainerStarted","Data":"cc8f947125e7bc98bdfa3f9fa7c551c6c67e007d9ff90c42b71c2e356ade17d7"} Jan 31 06:15:01 crc kubenswrapper[4712]: I0131 06:15:01.692127 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" event={"ID":"0b47ae7f-713f-4f37-af6c-112e264c6ef7","Type":"ContainerStarted","Data":"2c0f80a6a6ec7f8e6006f610571720a64940da7d5351d732e5c6078eb85e0be0"} Jan 31 06:15:01 crc kubenswrapper[4712]: I0131 06:15:01.722501 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" podStartSLOduration=1.722478165 podStartE2EDuration="1.722478165s" podCreationTimestamp="2026-01-31 06:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:15:01.70959793 +0000 UTC m=+2167.803479771" watchObservedRunningTime="2026-01-31 06:15:01.722478165 +0000 UTC m=+2167.816360006" Jan 31 06:15:02 crc kubenswrapper[4712]: I0131 06:15:02.701642 4712 generic.go:334] "Generic (PLEG): container finished" podID="0b47ae7f-713f-4f37-af6c-112e264c6ef7" containerID="cc8f947125e7bc98bdfa3f9fa7c551c6c67e007d9ff90c42b71c2e356ade17d7" exitCode=0 Jan 31 06:15:02 crc kubenswrapper[4712]: I0131 06:15:02.701742 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" event={"ID":"0b47ae7f-713f-4f37-af6c-112e264c6ef7","Type":"ContainerDied","Data":"cc8f947125e7bc98bdfa3f9fa7c551c6c67e007d9ff90c42b71c2e356ade17d7"} Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.115404 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.202996 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jj24\" (UniqueName: \"kubernetes.io/projected/0b47ae7f-713f-4f37-af6c-112e264c6ef7-kube-api-access-4jj24\") pod \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.203098 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b47ae7f-713f-4f37-af6c-112e264c6ef7-secret-volume\") pod \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.203189 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b47ae7f-713f-4f37-af6c-112e264c6ef7-config-volume\") pod \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\" (UID: \"0b47ae7f-713f-4f37-af6c-112e264c6ef7\") " Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.204239 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b47ae7f-713f-4f37-af6c-112e264c6ef7-config-volume" (OuterVolumeSpecName: "config-volume") pod "0b47ae7f-713f-4f37-af6c-112e264c6ef7" (UID: "0b47ae7f-713f-4f37-af6c-112e264c6ef7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.209880 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b47ae7f-713f-4f37-af6c-112e264c6ef7-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0b47ae7f-713f-4f37-af6c-112e264c6ef7" (UID: "0b47ae7f-713f-4f37-af6c-112e264c6ef7"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.212025 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b47ae7f-713f-4f37-af6c-112e264c6ef7-kube-api-access-4jj24" (OuterVolumeSpecName: "kube-api-access-4jj24") pod "0b47ae7f-713f-4f37-af6c-112e264c6ef7" (UID: "0b47ae7f-713f-4f37-af6c-112e264c6ef7"). InnerVolumeSpecName "kube-api-access-4jj24". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.304996 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jj24\" (UniqueName: \"kubernetes.io/projected/0b47ae7f-713f-4f37-af6c-112e264c6ef7-kube-api-access-4jj24\") on node \"crc\" DevicePath \"\"" Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.305063 4712 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b47ae7f-713f-4f37-af6c-112e264c6ef7-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.305077 4712 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b47ae7f-713f-4f37-af6c-112e264c6ef7-config-volume\") on node \"crc\" DevicePath \"\"" Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.308654 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q"] Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.318743 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497290-bbd4q"] Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.518883 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dad1b83-c7ab-4bc8-862b-78668f39bf8d" path="/var/lib/kubelet/pods/7dad1b83-c7ab-4bc8-862b-78668f39bf8d/volumes" Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.722551 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" event={"ID":"0b47ae7f-713f-4f37-af6c-112e264c6ef7","Type":"ContainerDied","Data":"2c0f80a6a6ec7f8e6006f610571720a64940da7d5351d732e5c6078eb85e0be0"} Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.722607 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c0f80a6a6ec7f8e6006f610571720a64940da7d5351d732e5c6078eb85e0be0" Jan 31 06:15:04 crc kubenswrapper[4712]: I0131 06:15:04.722619 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x" Jan 31 06:15:12 crc kubenswrapper[4712]: I0131 06:15:12.496944 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:15:12 crc kubenswrapper[4712]: I0131 06:15:12.497403 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:15:20 crc kubenswrapper[4712]: I0131 06:15:20.005494 4712 scope.go:117] "RemoveContainer" containerID="b1c4ac3096018c230454730faf2842e755cacfcc217cb8c81c81b581e7723af5" Jan 31 06:15:20 crc kubenswrapper[4712]: I0131 06:15:20.053777 4712 scope.go:117] "RemoveContainer" containerID="298df98efa2779a45cfad38893554bf27b23429e13d7d7a2182c78473ea2ce69" Jan 31 06:15:42 crc kubenswrapper[4712]: I0131 06:15:42.498034 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:15:42 crc kubenswrapper[4712]: I0131 06:15:42.498819 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:15:45 crc kubenswrapper[4712]: I0131 06:15:45.081236 4712 generic.go:334] "Generic (PLEG): container finished" podID="8a24ba49-5360-4c66-a06d-36f6915384a9" containerID="ef58403f2193c87cd86821d03d2dcb8de010e56538a18ba00cb1bc46efab8c12" exitCode=0 Jan 31 06:15:45 crc kubenswrapper[4712]: I0131 06:15:45.081411 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" event={"ID":"8a24ba49-5360-4c66-a06d-36f6915384a9","Type":"ContainerDied","Data":"ef58403f2193c87cd86821d03d2dcb8de010e56538a18ba00cb1bc46efab8c12"} Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.480460 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.545153 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-inventory\") pod \"8a24ba49-5360-4c66-a06d-36f6915384a9\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") "
Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.545703 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-ssh-key-openstack-edpm-ipam\") pod \"8a24ba49-5360-4c66-a06d-36f6915384a9\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") "
Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.546031 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x74d\" (UniqueName: \"kubernetes.io/projected/8a24ba49-5360-4c66-a06d-36f6915384a9-kube-api-access-2x74d\") pod \"8a24ba49-5360-4c66-a06d-36f6915384a9\" (UID: \"8a24ba49-5360-4c66-a06d-36f6915384a9\") "
Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.560592 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a24ba49-5360-4c66-a06d-36f6915384a9-kube-api-access-2x74d" (OuterVolumeSpecName: "kube-api-access-2x74d") pod "8a24ba49-5360-4c66-a06d-36f6915384a9" (UID: "8a24ba49-5360-4c66-a06d-36f6915384a9"). InnerVolumeSpecName "kube-api-access-2x74d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.577729 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-inventory" (OuterVolumeSpecName: "inventory") pod "8a24ba49-5360-4c66-a06d-36f6915384a9" (UID: "8a24ba49-5360-4c66-a06d-36f6915384a9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.587261 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "8a24ba49-5360-4c66-a06d-36f6915384a9" (UID: "8a24ba49-5360-4c66-a06d-36f6915384a9"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.649872 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x74d\" (UniqueName: \"kubernetes.io/projected/8a24ba49-5360-4c66-a06d-36f6915384a9-kube-api-access-2x74d\") on node \"crc\" DevicePath \"\""
Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.649913 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-inventory\") on node \"crc\" DevicePath \"\""
Jan 31 06:15:46 crc kubenswrapper[4712]: I0131 06:15:46.649926 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8a24ba49-5360-4c66-a06d-36f6915384a9-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.101887 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp" event={"ID":"8a24ba49-5360-4c66-a06d-36f6915384a9","Type":"ContainerDied","Data":"9d87c21f89f4399a06f4410eab48ac47ff47619648696f1b2087e10dd5b0f3bb"}
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.101949 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d87c21f89f4399a06f4410eab48ac47ff47619648696f1b2087e10dd5b0f3bb"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.102029 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.198805 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-cd9mf"]
Jan 31 06:15:47 crc kubenswrapper[4712]: E0131 06:15:47.199253 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a24ba49-5360-4c66-a06d-36f6915384a9" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.199273 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a24ba49-5360-4c66-a06d-36f6915384a9" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:15:47 crc kubenswrapper[4712]: E0131 06:15:47.199285 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b47ae7f-713f-4f37-af6c-112e264c6ef7" containerName="collect-profiles"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.199295 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b47ae7f-713f-4f37-af6c-112e264c6ef7" containerName="collect-profiles"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.199472 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a24ba49-5360-4c66-a06d-36f6915384a9" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.199497 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b47ae7f-713f-4f37-af6c-112e264c6ef7" containerName="collect-profiles"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.200132 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.208042 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.208087 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.208154 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.208621 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.223132 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-cd9mf"]
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.263702 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-cd9mf\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") " pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.263819 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htg28\" (UniqueName: \"kubernetes.io/projected/d0cbadc8-9972-41d6-9313-0337cb84f72d-kube-api-access-htg28\") pod \"ssh-known-hosts-edpm-deployment-cd9mf\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") " pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.263865 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-cd9mf\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") " pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.365731 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htg28\" (UniqueName: \"kubernetes.io/projected/d0cbadc8-9972-41d6-9313-0337cb84f72d-kube-api-access-htg28\") pod \"ssh-known-hosts-edpm-deployment-cd9mf\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") " pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.365813 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-cd9mf\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") " pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.365947 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-cd9mf\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") " pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.370650 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-cd9mf\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") " pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.371049 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-cd9mf\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") " pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.384277 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htg28\" (UniqueName: \"kubernetes.io/projected/d0cbadc8-9972-41d6-9313-0337cb84f72d-kube-api-access-htg28\") pod \"ssh-known-hosts-edpm-deployment-cd9mf\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") " pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:47 crc kubenswrapper[4712]: I0131 06:15:47.520812 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:48 crc kubenswrapper[4712]: I0131 06:15:48.088613 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-cd9mf"]
Jan 31 06:15:48 crc kubenswrapper[4712]: I0131 06:15:48.112424 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf" event={"ID":"d0cbadc8-9972-41d6-9313-0337cb84f72d","Type":"ContainerStarted","Data":"95d89390e51e28e4b7f649b8be4b297c015c7c681e8855ff5e244051087b57ab"}
Jan 31 06:15:50 crc kubenswrapper[4712]: I0131 06:15:50.135717 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf" event={"ID":"d0cbadc8-9972-41d6-9313-0337cb84f72d","Type":"ContainerStarted","Data":"c120c00d336649bca247bcc91eb1acde9d523fe1938cb6c7e6f060782023d525"}
Jan 31 06:15:50 crc kubenswrapper[4712]: I0131 06:15:50.173997 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf" podStartSLOduration=1.801612843 podStartE2EDuration="3.173964424s" podCreationTimestamp="2026-01-31 06:15:47 +0000 UTC" firstStartedPulling="2026-01-31 06:15:48.085412889 +0000 UTC m=+2214.179294770" lastFinishedPulling="2026-01-31 06:15:49.45776451 +0000 UTC m=+2215.551646351" observedRunningTime="2026-01-31 06:15:50.158241003 +0000 UTC m=+2216.252122854" watchObservedRunningTime="2026-01-31 06:15:50.173964424 +0000 UTC m=+2216.267846305"
Jan 31 06:15:57 crc kubenswrapper[4712]: I0131 06:15:57.195966 4712 generic.go:334] "Generic (PLEG): container finished" podID="d0cbadc8-9972-41d6-9313-0337cb84f72d" containerID="c120c00d336649bca247bcc91eb1acde9d523fe1938cb6c7e6f060782023d525" exitCode=0
Jan 31 06:15:57 crc kubenswrapper[4712]: I0131 06:15:57.196044 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf" event={"ID":"d0cbadc8-9972-41d6-9313-0337cb84f72d","Type":"ContainerDied","Data":"c120c00d336649bca247bcc91eb1acde9d523fe1938cb6c7e6f060782023d525"}
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.680460 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.832331 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htg28\" (UniqueName: \"kubernetes.io/projected/d0cbadc8-9972-41d6-9313-0337cb84f72d-kube-api-access-htg28\") pod \"d0cbadc8-9972-41d6-9313-0337cb84f72d\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") "
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.832465 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-ssh-key-openstack-edpm-ipam\") pod \"d0cbadc8-9972-41d6-9313-0337cb84f72d\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") "
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.832555 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-inventory-0\") pod \"d0cbadc8-9972-41d6-9313-0337cb84f72d\" (UID: \"d0cbadc8-9972-41d6-9313-0337cb84f72d\") "
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.841459 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0cbadc8-9972-41d6-9313-0337cb84f72d-kube-api-access-htg28" (OuterVolumeSpecName: "kube-api-access-htg28") pod "d0cbadc8-9972-41d6-9313-0337cb84f72d" (UID: "d0cbadc8-9972-41d6-9313-0337cb84f72d"). InnerVolumeSpecName "kube-api-access-htg28". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.869392 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "d0cbadc8-9972-41d6-9313-0337cb84f72d" (UID: "d0cbadc8-9972-41d6-9313-0337cb84f72d"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.869860 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "d0cbadc8-9972-41d6-9313-0337cb84f72d" (UID: "d0cbadc8-9972-41d6-9313-0337cb84f72d"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.935582 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.935641 4712 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d0cbadc8-9972-41d6-9313-0337cb84f72d-inventory-0\") on node \"crc\" DevicePath \"\""
Jan 31 06:15:58 crc kubenswrapper[4712]: I0131 06:15:58.935652 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htg28\" (UniqueName: \"kubernetes.io/projected/d0cbadc8-9972-41d6-9313-0337cb84f72d-kube-api-access-htg28\") on node \"crc\" DevicePath \"\""
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.214805 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf" event={"ID":"d0cbadc8-9972-41d6-9313-0337cb84f72d","Type":"ContainerDied","Data":"95d89390e51e28e4b7f649b8be4b297c015c7c681e8855ff5e244051087b57ab"}
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.214851 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95d89390e51e28e4b7f649b8be4b297c015c7c681e8855ff5e244051087b57ab"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.214871 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-cd9mf"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.295014 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"]
Jan 31 06:15:59 crc kubenswrapper[4712]: E0131 06:15:59.295547 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0cbadc8-9972-41d6-9313-0337cb84f72d" containerName="ssh-known-hosts-edpm-deployment"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.295573 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0cbadc8-9972-41d6-9313-0337cb84f72d" containerName="ssh-known-hosts-edpm-deployment"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.295750 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0cbadc8-9972-41d6-9313-0337cb84f72d" containerName="ssh-known-hosts-edpm-deployment"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.296550 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.301230 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.301543 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.301729 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.302379 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.306140 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"]
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.445961 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-w4v27\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.446653 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-w4v27\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.447133 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsczw\" (UniqueName: \"kubernetes.io/projected/ef660f01-216f-4f2e-89b8-55e0fb24c506-kube-api-access-vsczw\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-w4v27\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.548922 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-w4v27\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.548999 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsczw\" (UniqueName: \"kubernetes.io/projected/ef660f01-216f-4f2e-89b8-55e0fb24c506-kube-api-access-vsczw\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-w4v27\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.549102 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-w4v27\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.554940 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-w4v27\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.555416 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-w4v27\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.567705 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsczw\" (UniqueName: \"kubernetes.io/projected/ef660f01-216f-4f2e-89b8-55e0fb24c506-kube-api-access-vsczw\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-w4v27\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:15:59 crc kubenswrapper[4712]: I0131 06:15:59.618549 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:16:00 crc kubenswrapper[4712]: I0131 06:16:00.179490 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"]
Jan 31 06:16:00 crc kubenswrapper[4712]: I0131 06:16:00.226991 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27" event={"ID":"ef660f01-216f-4f2e-89b8-55e0fb24c506","Type":"ContainerStarted","Data":"8eddb1eaf807601578cdb36820c44359ab4a633c0c381c533c3f7843b9fd9d7c"}
Jan 31 06:16:01 crc kubenswrapper[4712]: I0131 06:16:01.250964 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27" event={"ID":"ef660f01-216f-4f2e-89b8-55e0fb24c506","Type":"ContainerStarted","Data":"4326da2d00cd9b2aa0f7c50d41ef7f6cd09c710a5c01a6ae914eafb6878c411f"}
Jan 31 06:16:01 crc kubenswrapper[4712]: I0131 06:16:01.272479 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27" podStartSLOduration=1.806330199 podStartE2EDuration="2.272458791s" podCreationTimestamp="2026-01-31 06:15:59 +0000 UTC" firstStartedPulling="2026-01-31 06:16:00.186292259 +0000 UTC m=+2226.280174100" lastFinishedPulling="2026-01-31 06:16:00.652420851 +0000 UTC m=+2226.746302692" observedRunningTime="2026-01-31 06:16:01.269912873 +0000 UTC m=+2227.363794724" watchObservedRunningTime="2026-01-31 06:16:01.272458791 +0000 UTC m=+2227.366340632"
Jan 31 06:16:08 crc kubenswrapper[4712]: I0131 06:16:08.318342 4712 generic.go:334] "Generic (PLEG): container finished" podID="ef660f01-216f-4f2e-89b8-55e0fb24c506" containerID="4326da2d00cd9b2aa0f7c50d41ef7f6cd09c710a5c01a6ae914eafb6878c411f" exitCode=0
Jan 31 06:16:08 crc kubenswrapper[4712]: I0131 06:16:08.318463 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27" event={"ID":"ef660f01-216f-4f2e-89b8-55e0fb24c506","Type":"ContainerDied","Data":"4326da2d00cd9b2aa0f7c50d41ef7f6cd09c710a5c01a6ae914eafb6878c411f"}
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.740690 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.872953 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-ssh-key-openstack-edpm-ipam\") pod \"ef660f01-216f-4f2e-89b8-55e0fb24c506\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") "
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.873408 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsczw\" (UniqueName: \"kubernetes.io/projected/ef660f01-216f-4f2e-89b8-55e0fb24c506-kube-api-access-vsczw\") pod \"ef660f01-216f-4f2e-89b8-55e0fb24c506\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") "
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.873541 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-inventory\") pod \"ef660f01-216f-4f2e-89b8-55e0fb24c506\" (UID: \"ef660f01-216f-4f2e-89b8-55e0fb24c506\") "
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.882945 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef660f01-216f-4f2e-89b8-55e0fb24c506-kube-api-access-vsczw" (OuterVolumeSpecName: "kube-api-access-vsczw") pod "ef660f01-216f-4f2e-89b8-55e0fb24c506" (UID: "ef660f01-216f-4f2e-89b8-55e0fb24c506"). InnerVolumeSpecName "kube-api-access-vsczw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.902307 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ef660f01-216f-4f2e-89b8-55e0fb24c506" (UID: "ef660f01-216f-4f2e-89b8-55e0fb24c506"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.912731 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-inventory" (OuterVolumeSpecName: "inventory") pod "ef660f01-216f-4f2e-89b8-55e0fb24c506" (UID: "ef660f01-216f-4f2e-89b8-55e0fb24c506"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.976244 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-inventory\") on node \"crc\" DevicePath \"\""
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.976278 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ef660f01-216f-4f2e-89b8-55e0fb24c506-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 31 06:16:09 crc kubenswrapper[4712]: I0131 06:16:09.976290 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsczw\" (UniqueName: \"kubernetes.io/projected/ef660f01-216f-4f2e-89b8-55e0fb24c506-kube-api-access-vsczw\") on node \"crc\" DevicePath \"\""
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.335949 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27" event={"ID":"ef660f01-216f-4f2e-89b8-55e0fb24c506","Type":"ContainerDied","Data":"8eddb1eaf807601578cdb36820c44359ab4a633c0c381c533c3f7843b9fd9d7c"}
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.335995 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8eddb1eaf807601578cdb36820c44359ab4a633c0c381c533c3f7843b9fd9d7c"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.336346 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-w4v27"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.415825 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"]
Jan 31 06:16:10 crc kubenswrapper[4712]: E0131 06:16:10.416597 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef660f01-216f-4f2e-89b8-55e0fb24c506" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.416622 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef660f01-216f-4f2e-89b8-55e0fb24c506" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.416888 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef660f01-216f-4f2e-89b8-55e0fb24c506" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.417860 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.420476 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.420508 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.420795 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.420916 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.456358 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"]
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.488041 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpwg8\" (UniqueName: \"kubernetes.io/projected/8e2c8610-b420-4018-b0d3-62afdc779dba-kube-api-access-xpwg8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.488223 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.488303 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.590488 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.590561 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.590703 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpwg8\" (UniqueName: \"kubernetes.io/projected/8e2c8610-b420-4018-b0d3-62afdc779dba-kube-api-access-xpwg8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.595103 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.595228 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.609341 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpwg8\" (UniqueName: \"kubernetes.io/projected/8e2c8610-b420-4018-b0d3-62afdc779dba-kube-api-access-xpwg8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:10 crc kubenswrapper[4712]: I0131 06:16:10.743802 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:11 crc kubenswrapper[4712]: I0131 06:16:11.287267 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"]
Jan 31 06:16:11 crc kubenswrapper[4712]: I0131 06:16:11.349383 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm" event={"ID":"8e2c8610-b420-4018-b0d3-62afdc779dba","Type":"ContainerStarted","Data":"5bea8576753a8d7179f26c96682aa0835e7ccb14d7a5bf43d1f125fc7336f624"}
Jan 31 06:16:12 crc kubenswrapper[4712]: I0131 06:16:12.362924 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm" event={"ID":"8e2c8610-b420-4018-b0d3-62afdc779dba","Type":"ContainerStarted","Data":"c68698073887960ac9c7745fdf52e7df469df8ef55313e79fe63f301935e2c89"}
Jan 31 06:16:12 crc kubenswrapper[4712]: I0131 06:16:12.386864 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm" podStartSLOduration=1.69375704 podStartE2EDuration="2.386844694s" podCreationTimestamp="2026-01-31 06:16:10 +0000 UTC" firstStartedPulling="2026-01-31 06:16:11.291316767 +0000 UTC m=+2237.385198608" lastFinishedPulling="2026-01-31 06:16:11.984404421 +0000 UTC m=+2238.078286262" observedRunningTime="2026-01-31 06:16:12.384717185 +0000 UTC m=+2238.478599026" watchObservedRunningTime="2026-01-31 06:16:12.386844694 +0000 UTC m=+2238.480726535"
Jan 31 06:16:12 crc kubenswrapper[4712]: I0131 06:16:12.497626 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 31 06:16:12 crc kubenswrapper[4712]: I0131 06:16:12.497709 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 31 06:16:12 crc kubenswrapper[4712]: I0131 06:16:12.497768 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd"
Jan 31 06:16:12 crc kubenswrapper[4712]: I0131 06:16:12.499437 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"916ac75ea8257b89462caa7aff53970687a07ffa908afaf5f0f04ef8e035aec3"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 31 06:16:12 crc kubenswrapper[4712]: I0131 06:16:12.499541 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://916ac75ea8257b89462caa7aff53970687a07ffa908afaf5f0f04ef8e035aec3" gracePeriod=600
Jan 31 06:16:13 crc kubenswrapper[4712]: I0131 06:16:13.385148 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="916ac75ea8257b89462caa7aff53970687a07ffa908afaf5f0f04ef8e035aec3" exitCode=0
Jan 31 06:16:13 crc kubenswrapper[4712]: I0131 06:16:13.385218 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"916ac75ea8257b89462caa7aff53970687a07ffa908afaf5f0f04ef8e035aec3"}
Jan 31 06:16:13 crc kubenswrapper[4712]: I0131 06:16:13.385598 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"}
Jan 31 06:16:13 crc kubenswrapper[4712]: I0131 06:16:13.385626 4712 scope.go:117] "RemoveContainer" containerID="c5fbd54f561d4af9239ab57ece547d79f914d0cd4fac24ca8d9526fce6ddf453"
Jan 31 06:16:21 crc kubenswrapper[4712]: I0131 06:16:21.460459 4712 generic.go:334] "Generic (PLEG): container finished" podID="8e2c8610-b420-4018-b0d3-62afdc779dba" containerID="c68698073887960ac9c7745fdf52e7df469df8ef55313e79fe63f301935e2c89" exitCode=0
Jan 31 06:16:21 crc kubenswrapper[4712]: I0131 06:16:21.460696 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm" event={"ID":"8e2c8610-b420-4018-b0d3-62afdc779dba","Type":"ContainerDied","Data":"c68698073887960ac9c7745fdf52e7df469df8ef55313e79fe63f301935e2c89"}
Jan 31 06:16:22 crc kubenswrapper[4712]: I0131 06:16:22.879213 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:22 crc kubenswrapper[4712]: I0131 06:16:22.974878 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpwg8\" (UniqueName: \"kubernetes.io/projected/8e2c8610-b420-4018-b0d3-62afdc779dba-kube-api-access-xpwg8\") pod \"8e2c8610-b420-4018-b0d3-62afdc779dba\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") "
Jan 31 06:16:22 crc kubenswrapper[4712]: I0131 06:16:22.974948 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-ssh-key-openstack-edpm-ipam\") pod \"8e2c8610-b420-4018-b0d3-62afdc779dba\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") "
Jan 31 06:16:22 crc kubenswrapper[4712]: I0131 06:16:22.975041 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-inventory\") pod \"8e2c8610-b420-4018-b0d3-62afdc779dba\" (UID: \"8e2c8610-b420-4018-b0d3-62afdc779dba\") "
Jan 31 06:16:22 crc kubenswrapper[4712]: I0131 06:16:22.987294 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e2c8610-b420-4018-b0d3-62afdc779dba-kube-api-access-xpwg8" (OuterVolumeSpecName: "kube-api-access-xpwg8") pod "8e2c8610-b420-4018-b0d3-62afdc779dba" (UID: "8e2c8610-b420-4018-b0d3-62afdc779dba"). InnerVolumeSpecName "kube-api-access-xpwg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.010145 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-inventory" (OuterVolumeSpecName: "inventory") pod "8e2c8610-b420-4018-b0d3-62afdc779dba" (UID: "8e2c8610-b420-4018-b0d3-62afdc779dba"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.010329 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "8e2c8610-b420-4018-b0d3-62afdc779dba" (UID: "8e2c8610-b420-4018-b0d3-62afdc779dba"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.077126 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpwg8\" (UniqueName: \"kubernetes.io/projected/8e2c8610-b420-4018-b0d3-62afdc779dba-kube-api-access-xpwg8\") on node \"crc\" DevicePath \"\""
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.077514 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.077525 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e2c8610-b420-4018-b0d3-62afdc779dba-inventory\") on node \"crc\" DevicePath \"\""
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.482607 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm" event={"ID":"8e2c8610-b420-4018-b0d3-62afdc779dba","Type":"ContainerDied","Data":"5bea8576753a8d7179f26c96682aa0835e7ccb14d7a5bf43d1f125fc7336f624"}
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.482664 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bea8576753a8d7179f26c96682aa0835e7ccb14d7a5bf43d1f125fc7336f624"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.482684 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.581825 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"]
Jan 31 06:16:23 crc kubenswrapper[4712]: E0131 06:16:23.582575 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e2c8610-b420-4018-b0d3-62afdc779dba" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.582605 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e2c8610-b420-4018-b0d3-62afdc779dba" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.582880 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e2c8610-b420-4018-b0d3-62afdc779dba" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.583832 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.588509 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.588519 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.588811 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.589004 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.589023 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.589192 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.589311 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.596735 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"]
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.602667 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690154 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690323 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690363 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690402 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690434 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690600 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqj92\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-kube-api-access-vqj92\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690647 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690695 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690810 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690938 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.690977 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.691012 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.691066 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.691095 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.793136 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.793332 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.793359 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"
Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.793388 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.793415 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.793442 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqj92\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-kube-api-access-vqj92\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.794002 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.794048 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.794109 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.794141 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.794165 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.794204 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.794226 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.794261 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.798122 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.798237 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.798243 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.799373 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.799425 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: 
\"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.800224 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.801216 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.802990 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.804201 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.804820 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.804829 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.809841 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.812386 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.815291 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqj92\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-kube-api-access-vqj92\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t9596\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:23 crc kubenswrapper[4712]: I0131 06:16:23.906581 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:16:24 crc kubenswrapper[4712]: I0131 06:16:24.457338 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596"] Jan 31 06:16:24 crc kubenswrapper[4712]: I0131 06:16:24.497777 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" event={"ID":"1b9eefb9-d787-4042-b8cf-b1d7160c09a4","Type":"ContainerStarted","Data":"59709a5a5a1dc47b8f3f359c45ef861ef403f50c5d2cebfba5e4e85a70d6a222"} Jan 31 06:16:25 crc kubenswrapper[4712]: I0131 06:16:25.512617 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" event={"ID":"1b9eefb9-d787-4042-b8cf-b1d7160c09a4","Type":"ContainerStarted","Data":"b345f014071fddad5d34042ff555819b485a097a18faa7f4a940e0cddb1fb94a"} Jan 31 06:17:01 crc kubenswrapper[4712]: E0131 06:17:01.007733 4712 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b9eefb9_d787_4042_b8cf_b1d7160c09a4.slice/crio-conmon-b345f014071fddad5d34042ff555819b485a097a18faa7f4a940e0cddb1fb94a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b9eefb9_d787_4042_b8cf_b1d7160c09a4.slice/crio-b345f014071fddad5d34042ff555819b485a097a18faa7f4a940e0cddb1fb94a.scope\": RecentStats: unable to find data in memory cache]" Jan 31 06:17:01 crc kubenswrapper[4712]: I0131 06:17:01.836554 4712 generic.go:334] "Generic (PLEG): container finished" podID="1b9eefb9-d787-4042-b8cf-b1d7160c09a4" containerID="b345f014071fddad5d34042ff555819b485a097a18faa7f4a940e0cddb1fb94a" exitCode=0 Jan 31 06:17:01 crc kubenswrapper[4712]: I0131 06:17:01.836642 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" event={"ID":"1b9eefb9-d787-4042-b8cf-b1d7160c09a4","Type":"ContainerDied","Data":"b345f014071fddad5d34042ff555819b485a097a18faa7f4a940e0cddb1fb94a"} Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.277058 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.368403 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-neutron-metadata-combined-ca-bundle\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.368503 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-libvirt-combined-ca-bundle\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.368554 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-bootstrap-combined-ca-bundle\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.368667 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-ovn-default-certs-0\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.368756 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.369575 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-inventory\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.369653 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqj92\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-kube-api-access-vqj92\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.369727 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ovn-combined-ca-bundle\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.369793 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ssh-key-openstack-edpm-ipam\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc 
kubenswrapper[4712]: I0131 06:17:03.369864 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.369904 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-nova-combined-ca-bundle\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.369950 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-repo-setup-combined-ca-bundle\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.369988 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.370032 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-telemetry-combined-ca-bundle\") pod \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\" (UID: \"1b9eefb9-d787-4042-b8cf-b1d7160c09a4\") " Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.380461 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.380449 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.380502 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.380550 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.380764 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.380862 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-kube-api-access-vqj92" (OuterVolumeSpecName: "kube-api-access-vqj92") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "kube-api-access-vqj92". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.380896 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.380912 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.386613 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.386665 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.386735 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.389588 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.409074 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-inventory" (OuterVolumeSpecName: "inventory") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.410481 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1b9eefb9-d787-4042-b8cf-b1d7160c09a4" (UID: "1b9eefb9-d787-4042-b8cf-b1d7160c09a4"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473720 4712 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473765 4712 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473780 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473794 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqj92\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-kube-api-access-vqj92\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473810 4712 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473822 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473834 4712 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473850 4712 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473866 4712 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473879 4712 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473892 4712 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473905 4712 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473917 4712 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.473929 4712 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9eefb9-d787-4042-b8cf-b1d7160c09a4-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.857938 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" event={"ID":"1b9eefb9-d787-4042-b8cf-b1d7160c09a4","Type":"ContainerDied","Data":"59709a5a5a1dc47b8f3f359c45ef861ef403f50c5d2cebfba5e4e85a70d6a222"} Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.857984 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59709a5a5a1dc47b8f3f359c45ef861ef403f50c5d2cebfba5e4e85a70d6a222" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.858009 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t9596" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.967515 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd"] Jan 31 06:17:03 crc kubenswrapper[4712]: E0131 06:17:03.968297 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b9eefb9-d787-4042-b8cf-b1d7160c09a4" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.968323 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b9eefb9-d787-4042-b8cf-b1d7160c09a4" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.968612 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b9eefb9-d787-4042-b8cf-b1d7160c09a4" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.969410 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.972392 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.972776 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.974111 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.974593 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.975255 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:17:03 crc kubenswrapper[4712]: I0131 06:17:03.977589 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd"] Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.019502 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.019662 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jglgq\" (UniqueName: \"kubernetes.io/projected/66f934d4-4354-4d23-80e4-0fd0b6facf41-kube-api-access-jglgq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.019696 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.019736 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.020128 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.122039 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.122127 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.122219 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jglgq\" (UniqueName: \"kubernetes.io/projected/66f934d4-4354-4d23-80e4-0fd0b6facf41-kube-api-access-jglgq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.122249 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.122282 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.123862 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.130569 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.130784 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.130844 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.143197 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jglgq\" (UniqueName: \"kubernetes.io/projected/66f934d4-4354-4d23-80e4-0fd0b6facf41-kube-api-access-jglgq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-2lsmd\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.334840 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:17:04 crc kubenswrapper[4712]: I0131 06:17:04.904330 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd"] Jan 31 06:17:05 crc kubenswrapper[4712]: I0131 06:17:05.877875 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" event={"ID":"66f934d4-4354-4d23-80e4-0fd0b6facf41","Type":"ContainerStarted","Data":"22ae2632f7d78b121acbbdf191b5f2f292bab5ef24ddb782552bc3ab66ef2f57"} Jan 31 06:17:06 crc kubenswrapper[4712]: I0131 06:17:06.888062 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" event={"ID":"66f934d4-4354-4d23-80e4-0fd0b6facf41","Type":"ContainerStarted","Data":"dc8c2fca69ad38e0f144da3291a39b1d3a2dc867b86d693dd51283baadb2d5ae"} Jan 31 06:17:06 crc kubenswrapper[4712]: I0131 06:17:06.915677 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" podStartSLOduration=3.018030729 podStartE2EDuration="3.91564744s" podCreationTimestamp="2026-01-31 06:17:03 +0000 UTC" firstStartedPulling="2026-01-31 06:17:04.909836562 +0000 UTC m=+2291.003718403" lastFinishedPulling="2026-01-31 06:17:05.807453263 +0000 UTC m=+2291.901335114" observedRunningTime="2026-01-31 06:17:06.90780216 +0000 UTC m=+2293.001684011" watchObservedRunningTime="2026-01-31 06:17:06.91564744 +0000 UTC m=+2293.009529281" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.509091 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fj5pt"] Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.511810 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.536070 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fj5pt"] Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.630450 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-catalog-content\") pod \"community-operators-fj5pt\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.630629 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-utilities\") pod \"community-operators-fj5pt\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.630689 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkp8r\" (UniqueName: \"kubernetes.io/projected/3b294ee5-1100-44d2-a65e-f21959a472f5-kube-api-access-tkp8r\") pod \"community-operators-fj5pt\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.732986 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-utilities\") pod \"community-operators-fj5pt\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.733466 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkp8r\" (UniqueName: \"kubernetes.io/projected/3b294ee5-1100-44d2-a65e-f21959a472f5-kube-api-access-tkp8r\") pod \"community-operators-fj5pt\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.733592 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-utilities\") pod \"community-operators-fj5pt\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.733774 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-catalog-content\") pod \"community-operators-fj5pt\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.734343 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-catalog-content\") pod \"community-operators-fj5pt\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.763343 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tkp8r\" (UniqueName: \"kubernetes.io/projected/3b294ee5-1100-44d2-a65e-f21959a472f5-kube-api-access-tkp8r\") pod \"community-operators-fj5pt\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:55 crc kubenswrapper[4712]: I0131 06:17:55.835836 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:17:56 crc kubenswrapper[4712]: I0131 06:17:56.393103 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fj5pt"] Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.350601 4712 generic.go:334] "Generic (PLEG): container finished" podID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerID="970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97" exitCode=0 Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.350720 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fj5pt" event={"ID":"3b294ee5-1100-44d2-a65e-f21959a472f5","Type":"ContainerDied","Data":"970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97"} Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.350945 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fj5pt" event={"ID":"3b294ee5-1100-44d2-a65e-f21959a472f5","Type":"ContainerStarted","Data":"1c6d0e2d549b350f73ec583fbff3ed54f00642015809deb3ae187c3d3e795e4b"} Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.353227 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.895468 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6kt6w"] Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.899474 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.909816 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kt6w"] Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.993488 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-utilities\") pod \"redhat-marketplace-6kt6w\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.993847 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-catalog-content\") pod \"redhat-marketplace-6kt6w\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:57 crc kubenswrapper[4712]: I0131 06:17:57.994081 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z777t\" (UniqueName: \"kubernetes.io/projected/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-kube-api-access-z777t\") pod \"redhat-marketplace-6kt6w\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:58 crc kubenswrapper[4712]: I0131 06:17:58.096007 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z777t\" (UniqueName: \"kubernetes.io/projected/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-kube-api-access-z777t\") pod \"redhat-marketplace-6kt6w\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:58 crc kubenswrapper[4712]: I0131 06:17:58.096263 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-utilities\") pod \"redhat-marketplace-6kt6w\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:58 crc kubenswrapper[4712]: I0131 06:17:58.096308 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-catalog-content\") pod \"redhat-marketplace-6kt6w\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:58 crc kubenswrapper[4712]: I0131 06:17:58.096921 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-catalog-content\") pod \"redhat-marketplace-6kt6w\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:58 crc kubenswrapper[4712]: I0131 06:17:58.097148 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-utilities\") pod \"redhat-marketplace-6kt6w\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:58 crc kubenswrapper[4712]: I0131 06:17:58.119805 4712 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-z777t\" (UniqueName: \"kubernetes.io/projected/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-kube-api-access-z777t\") pod \"redhat-marketplace-6kt6w\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:58 crc kubenswrapper[4712]: I0131 06:17:58.225534 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:17:58 crc kubenswrapper[4712]: I0131 06:17:58.386126 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fj5pt" event={"ID":"3b294ee5-1100-44d2-a65e-f21959a472f5","Type":"ContainerStarted","Data":"24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3"} Jan 31 06:17:58 crc kubenswrapper[4712]: I0131 06:17:58.554329 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kt6w"] Jan 31 06:17:58 crc kubenswrapper[4712]: W0131 06:17:58.568481 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50dbb8c8_cd21_450d_bc6a_bae3f2a5c0c7.slice/crio-1a57cc718c67e2c6df3180ec482d1a1cc8adf47e23c7414a755c20ae59b839bf WatchSource:0}: Error finding container 1a57cc718c67e2c6df3180ec482d1a1cc8adf47e23c7414a755c20ae59b839bf: Status 404 returned error can't find the container with id 1a57cc718c67e2c6df3180ec482d1a1cc8adf47e23c7414a755c20ae59b839bf Jan 31 06:17:59 crc kubenswrapper[4712]: I0131 06:17:59.396930 4712 generic.go:334] "Generic (PLEG): container finished" podID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerID="24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3" exitCode=0 Jan 31 06:17:59 crc kubenswrapper[4712]: I0131 06:17:59.397098 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fj5pt" event={"ID":"3b294ee5-1100-44d2-a65e-f21959a472f5","Type":"ContainerDied","Data":"24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3"} Jan 31 06:17:59 crc kubenswrapper[4712]: I0131 06:17:59.401106 4712 generic.go:334] "Generic (PLEG): container finished" podID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerID="18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9" exitCode=0 Jan 31 06:17:59 crc kubenswrapper[4712]: I0131 06:17:59.401143 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kt6w" event={"ID":"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7","Type":"ContainerDied","Data":"18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9"} Jan 31 06:17:59 crc kubenswrapper[4712]: I0131 06:17:59.401167 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kt6w" event={"ID":"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7","Type":"ContainerStarted","Data":"1a57cc718c67e2c6df3180ec482d1a1cc8adf47e23c7414a755c20ae59b839bf"} Jan 31 06:18:00 crc kubenswrapper[4712]: I0131 06:18:00.414527 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fj5pt" event={"ID":"3b294ee5-1100-44d2-a65e-f21959a472f5","Type":"ContainerStarted","Data":"80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d"} Jan 31 06:18:00 crc kubenswrapper[4712]: I0131 06:18:00.417005 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kt6w" 
event={"ID":"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7","Type":"ContainerStarted","Data":"ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1"} Jan 31 06:18:00 crc kubenswrapper[4712]: I0131 06:18:00.440873 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fj5pt" podStartSLOduration=2.862940108 podStartE2EDuration="5.440854075s" podCreationTimestamp="2026-01-31 06:17:55 +0000 UTC" firstStartedPulling="2026-01-31 06:17:57.353004142 +0000 UTC m=+2343.446885983" lastFinishedPulling="2026-01-31 06:17:59.930918109 +0000 UTC m=+2346.024799950" observedRunningTime="2026-01-31 06:18:00.437353561 +0000 UTC m=+2346.531235402" watchObservedRunningTime="2026-01-31 06:18:00.440854075 +0000 UTC m=+2346.534735906" Jan 31 06:18:01 crc kubenswrapper[4712]: I0131 06:18:01.436256 4712 generic.go:334] "Generic (PLEG): container finished" podID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerID="ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1" exitCode=0 Jan 31 06:18:01 crc kubenswrapper[4712]: I0131 06:18:01.436450 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kt6w" event={"ID":"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7","Type":"ContainerDied","Data":"ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1"} Jan 31 06:18:05 crc kubenswrapper[4712]: I0131 06:18:05.836621 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:18:05 crc kubenswrapper[4712]: I0131 06:18:05.837020 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:18:05 crc kubenswrapper[4712]: I0131 06:18:05.881331 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:18:06 crc kubenswrapper[4712]: I0131 06:18:06.485704 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kt6w" event={"ID":"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7","Type":"ContainerStarted","Data":"1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b"} Jan 31 06:18:06 crc kubenswrapper[4712]: I0131 06:18:06.521744 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6kt6w" podStartSLOduration=2.889377453 podStartE2EDuration="9.521706757s" podCreationTimestamp="2026-01-31 06:17:57 +0000 UTC" firstStartedPulling="2026-01-31 06:17:59.403257993 +0000 UTC m=+2345.497139834" lastFinishedPulling="2026-01-31 06:18:06.035587297 +0000 UTC m=+2352.129469138" observedRunningTime="2026-01-31 06:18:06.519124845 +0000 UTC m=+2352.613006686" watchObservedRunningTime="2026-01-31 06:18:06.521706757 +0000 UTC m=+2352.615588598" Jan 31 06:18:06 crc kubenswrapper[4712]: I0131 06:18:06.539627 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:18:07 crc kubenswrapper[4712]: I0131 06:18:07.497618 4712 generic.go:334] "Generic (PLEG): container finished" podID="66f934d4-4354-4d23-80e4-0fd0b6facf41" containerID="dc8c2fca69ad38e0f144da3291a39b1d3a2dc867b86d693dd51283baadb2d5ae" exitCode=0 Jan 31 06:18:07 crc kubenswrapper[4712]: I0131 06:18:07.497708 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" 
event={"ID":"66f934d4-4354-4d23-80e4-0fd0b6facf41","Type":"ContainerDied","Data":"dc8c2fca69ad38e0f144da3291a39b1d3a2dc867b86d693dd51283baadb2d5ae"} Jan 31 06:18:08 crc kubenswrapper[4712]: I0131 06:18:08.226097 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:18:08 crc kubenswrapper[4712]: I0131 06:18:08.226418 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:18:08 crc kubenswrapper[4712]: I0131 06:18:08.287042 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:18:08 crc kubenswrapper[4712]: I0131 06:18:08.883223 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fj5pt"] Jan 31 06:18:08 crc kubenswrapper[4712]: I0131 06:18:08.883896 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fj5pt" podUID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerName="registry-server" containerID="cri-o://80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d" gracePeriod=2 Jan 31 06:18:08 crc kubenswrapper[4712]: I0131 06:18:08.927403 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.060842 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jglgq\" (UniqueName: \"kubernetes.io/projected/66f934d4-4354-4d23-80e4-0fd0b6facf41-kube-api-access-jglgq\") pod \"66f934d4-4354-4d23-80e4-0fd0b6facf41\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.061041 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovn-combined-ca-bundle\") pod \"66f934d4-4354-4d23-80e4-0fd0b6facf41\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.061106 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-inventory\") pod \"66f934d4-4354-4d23-80e4-0fd0b6facf41\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.061410 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ssh-key-openstack-edpm-ipam\") pod \"66f934d4-4354-4d23-80e4-0fd0b6facf41\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.061498 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovncontroller-config-0\") pod \"66f934d4-4354-4d23-80e4-0fd0b6facf41\" (UID: \"66f934d4-4354-4d23-80e4-0fd0b6facf41\") " Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.069042 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovn-combined-ca-bundle" (OuterVolumeSpecName: 
"ovn-combined-ca-bundle") pod "66f934d4-4354-4d23-80e4-0fd0b6facf41" (UID: "66f934d4-4354-4d23-80e4-0fd0b6facf41"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.070995 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66f934d4-4354-4d23-80e4-0fd0b6facf41-kube-api-access-jglgq" (OuterVolumeSpecName: "kube-api-access-jglgq") pod "66f934d4-4354-4d23-80e4-0fd0b6facf41" (UID: "66f934d4-4354-4d23-80e4-0fd0b6facf41"). InnerVolumeSpecName "kube-api-access-jglgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.135154 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "66f934d4-4354-4d23-80e4-0fd0b6facf41" (UID: "66f934d4-4354-4d23-80e4-0fd0b6facf41"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.136065 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "66f934d4-4354-4d23-80e4-0fd0b6facf41" (UID: "66f934d4-4354-4d23-80e4-0fd0b6facf41"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.143427 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-inventory" (OuterVolumeSpecName: "inventory") pod "66f934d4-4354-4d23-80e4-0fd0b6facf41" (UID: "66f934d4-4354-4d23-80e4-0fd0b6facf41"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.164082 4712 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.164118 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.164128 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66f934d4-4354-4d23-80e4-0fd0b6facf41-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.164140 4712 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/66f934d4-4354-4d23-80e4-0fd0b6facf41-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.164149 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jglgq\" (UniqueName: \"kubernetes.io/projected/66f934d4-4354-4d23-80e4-0fd0b6facf41-kube-api-access-jglgq\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.338938 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.471779 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkp8r\" (UniqueName: \"kubernetes.io/projected/3b294ee5-1100-44d2-a65e-f21959a472f5-kube-api-access-tkp8r\") pod \"3b294ee5-1100-44d2-a65e-f21959a472f5\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.472150 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-utilities\") pod \"3b294ee5-1100-44d2-a65e-f21959a472f5\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.472424 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-catalog-content\") pod \"3b294ee5-1100-44d2-a65e-f21959a472f5\" (UID: \"3b294ee5-1100-44d2-a65e-f21959a472f5\") " Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.473071 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-utilities" (OuterVolumeSpecName: "utilities") pod "3b294ee5-1100-44d2-a65e-f21959a472f5" (UID: "3b294ee5-1100-44d2-a65e-f21959a472f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.476271 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b294ee5-1100-44d2-a65e-f21959a472f5-kube-api-access-tkp8r" (OuterVolumeSpecName: "kube-api-access-tkp8r") pod "3b294ee5-1100-44d2-a65e-f21959a472f5" (UID: "3b294ee5-1100-44d2-a65e-f21959a472f5"). InnerVolumeSpecName "kube-api-access-tkp8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.521510 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fj5pt" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.521448 4712 generic.go:334] "Generic (PLEG): container finished" podID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerID="80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d" exitCode=0 Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.521507 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fj5pt" event={"ID":"3b294ee5-1100-44d2-a65e-f21959a472f5","Type":"ContainerDied","Data":"80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d"} Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.521683 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fj5pt" event={"ID":"3b294ee5-1100-44d2-a65e-f21959a472f5","Type":"ContainerDied","Data":"1c6d0e2d549b350f73ec583fbff3ed54f00642015809deb3ae187c3d3e795e4b"} Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.521734 4712 scope.go:117] "RemoveContainer" containerID="80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.524510 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" event={"ID":"66f934d4-4354-4d23-80e4-0fd0b6facf41","Type":"ContainerDied","Data":"22ae2632f7d78b121acbbdf191b5f2f292bab5ef24ddb782552bc3ab66ef2f57"} Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.524558 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-2lsmd" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.524561 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22ae2632f7d78b121acbbdf191b5f2f292bab5ef24ddb782552bc3ab66ef2f57" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.528997 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b294ee5-1100-44d2-a65e-f21959a472f5" (UID: "3b294ee5-1100-44d2-a65e-f21959a472f5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.564614 4712 scope.go:117] "RemoveContainer" containerID="24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.575459 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkp8r\" (UniqueName: \"kubernetes.io/projected/3b294ee5-1100-44d2-a65e-f21959a472f5-kube-api-access-tkp8r\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.575502 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.575513 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b294ee5-1100-44d2-a65e-f21959a472f5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.611037 4712 scope.go:117] "RemoveContainer" containerID="970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.634584 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb"] Jan 31 06:18:09 crc kubenswrapper[4712]: E0131 06:18:09.635034 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerName="extract-content" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.635055 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerName="extract-content" Jan 31 06:18:09 crc kubenswrapper[4712]: E0131 06:18:09.635081 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerName="registry-server" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.635089 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerName="registry-server" Jan 31 06:18:09 crc kubenswrapper[4712]: E0131 06:18:09.635106 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerName="extract-utilities" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.635112 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerName="extract-utilities" Jan 31 06:18:09 crc kubenswrapper[4712]: E0131 06:18:09.635129 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f934d4-4354-4d23-80e4-0fd0b6facf41" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.635135 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f934d4-4354-4d23-80e4-0fd0b6facf41" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.635332 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b294ee5-1100-44d2-a65e-f21959a472f5" containerName="registry-server" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.635346 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="66f934d4-4354-4d23-80e4-0fd0b6facf41" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.636053 4712 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.639089 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.639345 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.641424 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.641528 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.641644 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.641824 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.643257 4712 scope.go:117] "RemoveContainer" containerID="80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d" Jan 31 06:18:09 crc kubenswrapper[4712]: E0131 06:18:09.643679 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d\": container with ID starting with 80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d not found: ID does not exist" containerID="80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.643703 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d"} err="failed to get container status \"80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d\": rpc error: code = NotFound desc = could not find container \"80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d\": container with ID starting with 80336f8eaaff584def919a16c0e82dfc5e0e929ea8ad0c97a40579fdb88bd03d not found: ID does not exist" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.643727 4712 scope.go:117] "RemoveContainer" containerID="24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3" Jan 31 06:18:09 crc kubenswrapper[4712]: E0131 06:18:09.644186 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3\": container with ID starting with 24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3 not found: ID does not exist" containerID="24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.644210 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3"} err="failed to get container status \"24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3\": rpc error: code = NotFound desc = could not find container 
\"24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3\": container with ID starting with 24ffd55a1897741eccafedacdc6947c346ca117f7f99e4e9158d529f0ef9b8c3 not found: ID does not exist" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.644227 4712 scope.go:117] "RemoveContainer" containerID="970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97" Jan 31 06:18:09 crc kubenswrapper[4712]: E0131 06:18:09.644451 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97\": container with ID starting with 970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97 not found: ID does not exist" containerID="970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.644467 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97"} err="failed to get container status \"970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97\": rpc error: code = NotFound desc = could not find container \"970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97\": container with ID starting with 970784dc2e7337d7ce09e2bae748ec7bf9d7f7147d96d7b84776f96d8ca1cf97 not found: ID does not exist" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.654127 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb"] Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.782191 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm5cd\" (UniqueName: \"kubernetes.io/projected/104962fa-0d0e-40b4-aacc-94ae160c761d-kube-api-access-jm5cd\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.782265 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.782467 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.782676 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.782736 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.782785 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.884740 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.884804 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.884855 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.884978 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm5cd\" (UniqueName: \"kubernetes.io/projected/104962fa-0d0e-40b4-aacc-94ae160c761d-kube-api-access-jm5cd\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.885019 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.885088 4712 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.891300 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.891394 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fj5pt"] Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.892664 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.898138 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.898458 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.898554 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.900318 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fj5pt"] Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 06:18:09.909914 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm5cd\" (UniqueName: \"kubernetes.io/projected/104962fa-0d0e-40b4-aacc-94ae160c761d-kube-api-access-jm5cd\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:09 crc kubenswrapper[4712]: I0131 
06:18:09.960315 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:10 crc kubenswrapper[4712]: I0131 06:18:10.343895 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb"] Jan 31 06:18:10 crc kubenswrapper[4712]: W0131 06:18:10.344955 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod104962fa_0d0e_40b4_aacc_94ae160c761d.slice/crio-bebb69f4488907c5ed88fb1a411b7bf5ed34475d5376ce2dc9b4a8d4fe84888f WatchSource:0}: Error finding container bebb69f4488907c5ed88fb1a411b7bf5ed34475d5376ce2dc9b4a8d4fe84888f: Status 404 returned error can't find the container with id bebb69f4488907c5ed88fb1a411b7bf5ed34475d5376ce2dc9b4a8d4fe84888f Jan 31 06:18:10 crc kubenswrapper[4712]: I0131 06:18:10.513624 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b294ee5-1100-44d2-a65e-f21959a472f5" path="/var/lib/kubelet/pods/3b294ee5-1100-44d2-a65e-f21959a472f5/volumes" Jan 31 06:18:10 crc kubenswrapper[4712]: I0131 06:18:10.535235 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" event={"ID":"104962fa-0d0e-40b4-aacc-94ae160c761d","Type":"ContainerStarted","Data":"bebb69f4488907c5ed88fb1a411b7bf5ed34475d5376ce2dc9b4a8d4fe84888f"} Jan 31 06:18:11 crc kubenswrapper[4712]: I0131 06:18:11.547281 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" event={"ID":"104962fa-0d0e-40b4-aacc-94ae160c761d","Type":"ContainerStarted","Data":"ecf33b132fc2a3cfc7fbde6ea49dd6752327075a9c100373b44621dab659ea4f"} Jan 31 06:18:11 crc kubenswrapper[4712]: I0131 06:18:11.573804 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" podStartSLOduration=1.987483455 podStartE2EDuration="2.57377687s" podCreationTimestamp="2026-01-31 06:18:09 +0000 UTC" firstStartedPulling="2026-01-31 06:18:10.348414311 +0000 UTC m=+2356.442296152" lastFinishedPulling="2026-01-31 06:18:10.934707726 +0000 UTC m=+2357.028589567" observedRunningTime="2026-01-31 06:18:11.564441234 +0000 UTC m=+2357.658323075" watchObservedRunningTime="2026-01-31 06:18:11.57377687 +0000 UTC m=+2357.667658741" Jan 31 06:18:12 crc kubenswrapper[4712]: I0131 06:18:12.497127 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:18:12 crc kubenswrapper[4712]: I0131 06:18:12.497225 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:18:18 crc kubenswrapper[4712]: I0131 06:18:18.296096 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:18:18 crc kubenswrapper[4712]: I0131 06:18:18.354741 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-6kt6w"] Jan 31 06:18:18 crc kubenswrapper[4712]: I0131 06:18:18.612117 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6kt6w" podUID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerName="registry-server" containerID="cri-o://1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b" gracePeriod=2 Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.078099 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.211492 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z777t\" (UniqueName: \"kubernetes.io/projected/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-kube-api-access-z777t\") pod \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.211606 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-catalog-content\") pod \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.211791 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-utilities\") pod \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\" (UID: \"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7\") " Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.212769 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-utilities" (OuterVolumeSpecName: "utilities") pod "50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" (UID: "50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.220601 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-kube-api-access-z777t" (OuterVolumeSpecName: "kube-api-access-z777t") pod "50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" (UID: "50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7"). InnerVolumeSpecName "kube-api-access-z777t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.239007 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" (UID: "50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.314567 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z777t\" (UniqueName: \"kubernetes.io/projected/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-kube-api-access-z777t\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.314609 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.314619 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.625361 4712 generic.go:334] "Generic (PLEG): container finished" podID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerID="1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b" exitCode=0 Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.625424 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kt6w" event={"ID":"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7","Type":"ContainerDied","Data":"1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b"} Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.625457 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kt6w" event={"ID":"50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7","Type":"ContainerDied","Data":"1a57cc718c67e2c6df3180ec482d1a1cc8adf47e23c7414a755c20ae59b839bf"} Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.625477 4712 scope.go:117] "RemoveContainer" containerID="1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.625655 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kt6w" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.666102 4712 scope.go:117] "RemoveContainer" containerID="ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.690428 4712 scope.go:117] "RemoveContainer" containerID="18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.690530 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kt6w"] Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.690612 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kt6w"] Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.768451 4712 scope.go:117] "RemoveContainer" containerID="1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b" Jan 31 06:18:19 crc kubenswrapper[4712]: E0131 06:18:19.769423 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b\": container with ID starting with 1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b not found: ID does not exist" containerID="1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.769467 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b"} err="failed to get container status \"1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b\": rpc error: code = NotFound desc = could not find container \"1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b\": container with ID starting with 1dfd50e79ca7eac19ce422b4057d6d4d66a98eb6d64ebd208ce7c7eee5bd952b not found: ID does not exist" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.769495 4712 scope.go:117] "RemoveContainer" containerID="ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1" Jan 31 06:18:19 crc kubenswrapper[4712]: E0131 06:18:19.769995 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1\": container with ID starting with ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1 not found: ID does not exist" containerID="ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.770024 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1"} err="failed to get container status \"ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1\": rpc error: code = NotFound desc = could not find container \"ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1\": container with ID starting with ca39237a437211d84365a6cfa19dee38dd483716a6f3ad3e98c57b6c5f5fd1a1 not found: ID does not exist" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.770044 4712 scope.go:117] "RemoveContainer" containerID="18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9" Jan 31 06:18:19 crc kubenswrapper[4712]: E0131 06:18:19.770436 4712 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9\": container with ID starting with 18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9 not found: ID does not exist" containerID="18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9" Jan 31 06:18:19 crc kubenswrapper[4712]: I0131 06:18:19.770462 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9"} err="failed to get container status \"18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9\": rpc error: code = NotFound desc = could not find container \"18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9\": container with ID starting with 18150a3af55232ca2c11061e2d06da633890d7fee2004d8530f68626a35b92a9 not found: ID does not exist" Jan 31 06:18:20 crc kubenswrapper[4712]: I0131 06:18:20.514933 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" path="/var/lib/kubelet/pods/50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7/volumes" Jan 31 06:18:42 crc kubenswrapper[4712]: I0131 06:18:42.497348 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:18:42 crc kubenswrapper[4712]: I0131 06:18:42.498100 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:18:56 crc kubenswrapper[4712]: I0131 06:18:56.008861 4712 generic.go:334] "Generic (PLEG): container finished" podID="104962fa-0d0e-40b4-aacc-94ae160c761d" containerID="ecf33b132fc2a3cfc7fbde6ea49dd6752327075a9c100373b44621dab659ea4f" exitCode=0 Jan 31 06:18:56 crc kubenswrapper[4712]: I0131 06:18:56.009379 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" event={"ID":"104962fa-0d0e-40b4-aacc-94ae160c761d","Type":"ContainerDied","Data":"ecf33b132fc2a3cfc7fbde6ea49dd6752327075a9c100373b44621dab659ea4f"} Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.435757 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.529831 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-ssh-key-openstack-edpm-ipam\") pod \"104962fa-0d0e-40b4-aacc-94ae160c761d\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.529951 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-inventory\") pod \"104962fa-0d0e-40b4-aacc-94ae160c761d\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.530023 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"104962fa-0d0e-40b4-aacc-94ae160c761d\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.530050 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-metadata-combined-ca-bundle\") pod \"104962fa-0d0e-40b4-aacc-94ae160c761d\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.530165 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm5cd\" (UniqueName: \"kubernetes.io/projected/104962fa-0d0e-40b4-aacc-94ae160c761d-kube-api-access-jm5cd\") pod \"104962fa-0d0e-40b4-aacc-94ae160c761d\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.530517 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-nova-metadata-neutron-config-0\") pod \"104962fa-0d0e-40b4-aacc-94ae160c761d\" (UID: \"104962fa-0d0e-40b4-aacc-94ae160c761d\") " Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.537625 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104962fa-0d0e-40b4-aacc-94ae160c761d-kube-api-access-jm5cd" (OuterVolumeSpecName: "kube-api-access-jm5cd") pod "104962fa-0d0e-40b4-aacc-94ae160c761d" (UID: "104962fa-0d0e-40b4-aacc-94ae160c761d"). InnerVolumeSpecName "kube-api-access-jm5cd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.538062 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "104962fa-0d0e-40b4-aacc-94ae160c761d" (UID: "104962fa-0d0e-40b4-aacc-94ae160c761d"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.559004 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-inventory" (OuterVolumeSpecName: "inventory") pod "104962fa-0d0e-40b4-aacc-94ae160c761d" (UID: "104962fa-0d0e-40b4-aacc-94ae160c761d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.559042 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "104962fa-0d0e-40b4-aacc-94ae160c761d" (UID: "104962fa-0d0e-40b4-aacc-94ae160c761d"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.559198 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "104962fa-0d0e-40b4-aacc-94ae160c761d" (UID: "104962fa-0d0e-40b4-aacc-94ae160c761d"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.574449 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "104962fa-0d0e-40b4-aacc-94ae160c761d" (UID: "104962fa-0d0e-40b4-aacc-94ae160c761d"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.633379 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.633425 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.633437 4712 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.633447 4712 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.633457 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm5cd\" (UniqueName: \"kubernetes.io/projected/104962fa-0d0e-40b4-aacc-94ae160c761d-kube-api-access-jm5cd\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:57 crc kubenswrapper[4712]: I0131 06:18:57.633468 4712 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/104962fa-0d0e-40b4-aacc-94ae160c761d-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.029947 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" event={"ID":"104962fa-0d0e-40b4-aacc-94ae160c761d","Type":"ContainerDied","Data":"bebb69f4488907c5ed88fb1a411b7bf5ed34475d5376ce2dc9b4a8d4fe84888f"} Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.030003 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bebb69f4488907c5ed88fb1a411b7bf5ed34475d5376ce2dc9b4a8d4fe84888f" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.030029 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.122721 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8"] Jan 31 06:18:58 crc kubenswrapper[4712]: E0131 06:18:58.123410 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerName="extract-utilities" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.123487 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerName="extract-utilities" Jan 31 06:18:58 crc kubenswrapper[4712]: E0131 06:18:58.123598 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104962fa-0d0e-40b4-aacc-94ae160c761d" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.123655 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="104962fa-0d0e-40b4-aacc-94ae160c761d" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jan 31 06:18:58 crc kubenswrapper[4712]: E0131 06:18:58.123714 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerName="registry-server" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.123771 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerName="registry-server" Jan 31 06:18:58 crc kubenswrapper[4712]: E0131 06:18:58.123843 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerName="extract-content" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.123897 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerName="extract-content" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.124124 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="104962fa-0d0e-40b4-aacc-94ae160c761d" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.124209 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="50dbb8c8-cd21-450d-bc6a-bae3f2a5c0c7" containerName="registry-server" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.125222 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.131090 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.131548 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.131854 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.132115 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.132298 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.143895 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.143954 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.144011 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.144124 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsc5x\" (UniqueName: \"kubernetes.io/projected/1e610581-bd59-418a-901a-7a37acc85442-kube-api-access-dsc5x\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.144155 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.151353 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8"] Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.245120 4712 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-dsc5x\" (UniqueName: \"kubernetes.io/projected/1e610581-bd59-418a-901a-7a37acc85442-kube-api-access-dsc5x\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.245446 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.245620 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.245712 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.245817 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.249460 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.249528 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.249586 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.250727 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" 
(UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.264770 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsc5x\" (UniqueName: \"kubernetes.io/projected/1e610581-bd59-418a-901a-7a37acc85442-kube-api-access-dsc5x\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x74g8\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:58 crc kubenswrapper[4712]: I0131 06:18:58.452728 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:18:59 crc kubenswrapper[4712]: I0131 06:18:59.116520 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8"] Jan 31 06:19:00 crc kubenswrapper[4712]: I0131 06:19:00.053730 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" event={"ID":"1e610581-bd59-418a-901a-7a37acc85442","Type":"ContainerStarted","Data":"f8384e7ecfef96846dfd7be08691f1aff38d97abec4b7e85eb6dbe11f70ddb5f"} Jan 31 06:19:00 crc kubenswrapper[4712]: I0131 06:19:00.054276 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" event={"ID":"1e610581-bd59-418a-901a-7a37acc85442","Type":"ContainerStarted","Data":"f2b77fd4a510ef5b3edd66894c60fe61a70464f18567d5399fab57c0b6dba039"} Jan 31 06:19:00 crc kubenswrapper[4712]: I0131 06:19:00.075063 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" podStartSLOduration=1.6236853679999999 podStartE2EDuration="2.075042217s" podCreationTimestamp="2026-01-31 06:18:58 +0000 UTC" firstStartedPulling="2026-01-31 06:18:59.129154825 +0000 UTC m=+2405.223036666" lastFinishedPulling="2026-01-31 06:18:59.580511684 +0000 UTC m=+2405.674393515" observedRunningTime="2026-01-31 06:19:00.069948434 +0000 UTC m=+2406.163830275" watchObservedRunningTime="2026-01-31 06:19:00.075042217 +0000 UTC m=+2406.168924058" Jan 31 06:19:12 crc kubenswrapper[4712]: I0131 06:19:12.497644 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:19:12 crc kubenswrapper[4712]: I0131 06:19:12.498280 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:19:12 crc kubenswrapper[4712]: I0131 06:19:12.498341 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 06:19:12 crc kubenswrapper[4712]: I0131 06:19:12.499157 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 06:19:12 crc kubenswrapper[4712]: I0131 06:19:12.499316 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" gracePeriod=600 Jan 31 06:19:12 crc kubenswrapper[4712]: E0131 06:19:12.622220 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:19:13 crc kubenswrapper[4712]: I0131 06:19:13.198888 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" exitCode=0 Jan 31 06:19:13 crc kubenswrapper[4712]: I0131 06:19:13.198945 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"} Jan 31 06:19:13 crc kubenswrapper[4712]: I0131 06:19:13.198985 4712 scope.go:117] "RemoveContainer" containerID="916ac75ea8257b89462caa7aff53970687a07ffa908afaf5f0f04ef8e035aec3" Jan 31 06:19:13 crc kubenswrapper[4712]: I0131 06:19:13.199846 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:19:13 crc kubenswrapper[4712]: E0131 06:19:13.200105 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:19:20 crc kubenswrapper[4712]: I0131 06:19:20.839637 4712 scope.go:117] "RemoveContainer" containerID="75bd6d4d3907ea002517fac21e4cd12593e5b123abe94262485a6412a8249b24" Jan 31 06:19:20 crc kubenswrapper[4712]: I0131 06:19:20.879488 4712 scope.go:117] "RemoveContainer" containerID="a082b65d3cf246abfb24b925e08fc01f0cd4f7ae6d329938f1f02ba013f83d79" Jan 31 06:19:20 crc kubenswrapper[4712]: I0131 06:19:20.911914 4712 scope.go:117] "RemoveContainer" containerID="8ad7b84e6d38cb2fdb08709e46002e498f9496afd71dcf6eed51f6d7ced3ca34" Jan 31 06:19:25 crc kubenswrapper[4712]: I0131 06:19:25.504708 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:19:25 crc kubenswrapper[4712]: E0131 06:19:25.505628 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
Jan 31 06:19:37 crc kubenswrapper[4712]: I0131 06:19:37.503960 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"
Jan 31 06:19:37 crc kubenswrapper[4712]: E0131 06:19:37.505020 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:19:48 crc kubenswrapper[4712]: I0131 06:19:48.505058 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"
Jan 31 06:19:48 crc kubenswrapper[4712]: E0131 06:19:48.505887 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:20:03 crc kubenswrapper[4712]: I0131 06:20:03.504918 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"
Jan 31 06:20:03 crc kubenswrapper[4712]: E0131 06:20:03.505797 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:20:18 crc kubenswrapper[4712]: I0131 06:20:18.505711 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"
Jan 31 06:20:18 crc kubenswrapper[4712]: E0131 06:20:18.506692 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:20:29 crc kubenswrapper[4712]: I0131 06:20:29.504725 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"
Jan 31 06:20:29 crc kubenswrapper[4712]: E0131 06:20:29.505542 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:20:43 crc kubenswrapper[4712]: I0131 06:20:43.504035 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:20:43 crc kubenswrapper[4712]: E0131 06:20:43.504882 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:20:57 crc kubenswrapper[4712]: I0131 06:20:57.504211 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:20:57 crc kubenswrapper[4712]: E0131 06:20:57.505405 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:21:12 crc kubenswrapper[4712]: I0131 06:21:12.503914 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:21:12 crc kubenswrapper[4712]: E0131 06:21:12.504816 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:21:24 crc kubenswrapper[4712]: I0131 06:21:24.509531 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:21:24 crc kubenswrapper[4712]: E0131 06:21:24.510335 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:21:37 crc kubenswrapper[4712]: I0131 06:21:37.504526 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:21:37 crc kubenswrapper[4712]: E0131 06:21:37.505407 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:21:52 crc kubenswrapper[4712]: I0131 06:21:52.505312 4712 
scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:21:52 crc kubenswrapper[4712]: E0131 06:21:52.505976 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:22:03 crc kubenswrapper[4712]: I0131 06:22:03.504741 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:22:03 crc kubenswrapper[4712]: E0131 06:22:03.505517 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:22:18 crc kubenswrapper[4712]: I0131 06:22:18.504418 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:22:18 crc kubenswrapper[4712]: E0131 06:22:18.506295 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:22:32 crc kubenswrapper[4712]: I0131 06:22:32.505402 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:22:32 crc kubenswrapper[4712]: E0131 06:22:32.506270 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:22:43 crc kubenswrapper[4712]: I0131 06:22:43.504283 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:22:43 crc kubenswrapper[4712]: E0131 06:22:43.505039 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:22:58 crc kubenswrapper[4712]: I0131 06:22:58.504794 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:22:58 crc kubenswrapper[4712]: E0131 06:22:58.505564 4712 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:23:10 crc kubenswrapper[4712]: I0131 06:23:10.504920 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:23:10 crc kubenswrapper[4712]: E0131 06:23:10.505863 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:23:12 crc kubenswrapper[4712]: I0131 06:23:12.422483 4712 generic.go:334] "Generic (PLEG): container finished" podID="1e610581-bd59-418a-901a-7a37acc85442" containerID="f8384e7ecfef96846dfd7be08691f1aff38d97abec4b7e85eb6dbe11f70ddb5f" exitCode=0 Jan 31 06:23:12 crc kubenswrapper[4712]: I0131 06:23:12.422599 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" event={"ID":"1e610581-bd59-418a-901a-7a37acc85442","Type":"ContainerDied","Data":"f8384e7ecfef96846dfd7be08691f1aff38d97abec4b7e85eb6dbe11f70ddb5f"} Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.867960 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.918430 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-secret-0\") pod \"1e610581-bd59-418a-901a-7a37acc85442\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.918704 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-combined-ca-bundle\") pod \"1e610581-bd59-418a-901a-7a37acc85442\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.918786 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-ssh-key-openstack-edpm-ipam\") pod \"1e610581-bd59-418a-901a-7a37acc85442\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.918837 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsc5x\" (UniqueName: \"kubernetes.io/projected/1e610581-bd59-418a-901a-7a37acc85442-kube-api-access-dsc5x\") pod \"1e610581-bd59-418a-901a-7a37acc85442\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.918955 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-inventory\") pod \"1e610581-bd59-418a-901a-7a37acc85442\" (UID: \"1e610581-bd59-418a-901a-7a37acc85442\") " Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.925456 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "1e610581-bd59-418a-901a-7a37acc85442" (UID: "1e610581-bd59-418a-901a-7a37acc85442"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.925490 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e610581-bd59-418a-901a-7a37acc85442-kube-api-access-dsc5x" (OuterVolumeSpecName: "kube-api-access-dsc5x") pod "1e610581-bd59-418a-901a-7a37acc85442" (UID: "1e610581-bd59-418a-901a-7a37acc85442"). InnerVolumeSpecName "kube-api-access-dsc5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.955351 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "1e610581-bd59-418a-901a-7a37acc85442" (UID: "1e610581-bd59-418a-901a-7a37acc85442"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.962296 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-inventory" (OuterVolumeSpecName: "inventory") pod "1e610581-bd59-418a-901a-7a37acc85442" (UID: "1e610581-bd59-418a-901a-7a37acc85442"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:23:13 crc kubenswrapper[4712]: I0131 06:23:13.966011 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1e610581-bd59-418a-901a-7a37acc85442" (UID: "1e610581-bd59-418a-901a-7a37acc85442"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.022106 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.022187 4712 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.022205 4712 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.022219 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1e610581-bd59-418a-901a-7a37acc85442-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.022234 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsc5x\" (UniqueName: \"kubernetes.io/projected/1e610581-bd59-418a-901a-7a37acc85442-kube-api-access-dsc5x\") on node \"crc\" DevicePath \"\"" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.441662 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" event={"ID":"1e610581-bd59-418a-901a-7a37acc85442","Type":"ContainerDied","Data":"f2b77fd4a510ef5b3edd66894c60fe61a70464f18567d5399fab57c0b6dba039"} Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.441936 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2b77fd4a510ef5b3edd66894c60fe61a70464f18567d5399fab57c0b6dba039" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.441751 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x74g8" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.544390 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv"] Jan 31 06:23:14 crc kubenswrapper[4712]: E0131 06:23:14.544884 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e610581-bd59-418a-901a-7a37acc85442" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.544904 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e610581-bd59-418a-901a-7a37acc85442" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.545095 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e610581-bd59-418a-901a-7a37acc85442" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.547215 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.550277 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.550413 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.550623 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.550949 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.551279 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.551598 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.553378 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.562866 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv"] Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.634044 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.634118 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.634241 4712 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t9q9\" (UniqueName: \"kubernetes.io/projected/293b2cc8-393a-4043-ac70-89b0a519de4b-kube-api-access-4t9q9\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.634321 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.634407 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.634425 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.634493 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.634513 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.634538 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.736311 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t9q9\" (UniqueName: \"kubernetes.io/projected/293b2cc8-393a-4043-ac70-89b0a519de4b-kube-api-access-4t9q9\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.736421 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.736503 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.736532 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.736581 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.736603 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.736758 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.737425 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.737667 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.737786 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.745148 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.745402 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.745786 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.746822 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.748613 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.757047 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.757588 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:23:14 crc 
kubenswrapper[4712]: I0131 06:23:14.775273 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t9q9\" (UniqueName: \"kubernetes.io/projected/293b2cc8-393a-4043-ac70-89b0a519de4b-kube-api-access-4t9q9\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fpgv\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv"
Jan 31 06:23:14 crc kubenswrapper[4712]: I0131 06:23:14.865635 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv"
Jan 31 06:23:15 crc kubenswrapper[4712]: I0131 06:23:15.479083 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv"]
Jan 31 06:23:15 crc kubenswrapper[4712]: I0131 06:23:15.485018 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 31 06:23:16 crc kubenswrapper[4712]: I0131 06:23:16.463812 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" event={"ID":"293b2cc8-393a-4043-ac70-89b0a519de4b","Type":"ContainerStarted","Data":"698f84f53c4ed815bd18aa96226b10cb893f6b9dc7bd63e4f53ee6fd06de9ef0"}
Jan 31 06:23:17 crc kubenswrapper[4712]: I0131 06:23:17.472892 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" event={"ID":"293b2cc8-393a-4043-ac70-89b0a519de4b","Type":"ContainerStarted","Data":"3ff9932b53a4daff4de33085d8971579815f77328d7112915916ab221cdb4c40"}
Jan 31 06:23:17 crc kubenswrapper[4712]: I0131 06:23:17.494094 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" podStartSLOduration=2.669744986 podStartE2EDuration="3.494071955s" podCreationTimestamp="2026-01-31 06:23:14 +0000 UTC" firstStartedPulling="2026-01-31 06:23:15.484803346 +0000 UTC m=+2661.578685177" lastFinishedPulling="2026-01-31 06:23:16.309130305 +0000 UTC m=+2662.403012146" observedRunningTime="2026-01-31 06:23:17.492982129 +0000 UTC m=+2663.586863970" watchObservedRunningTime="2026-01-31 06:23:17.494071955 +0000 UTC m=+2663.587953816"
Jan 31 06:23:23 crc kubenswrapper[4712]: I0131 06:23:23.504849 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"
Jan 31 06:23:23 crc kubenswrapper[4712]: E0131 06:23:23.505788 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:23:34 crc kubenswrapper[4712]: I0131 06:23:34.963600 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785"
Jan 31 06:23:34 crc kubenswrapper[4712]: E0131 06:23:34.965632 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
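
The pod_startup_latency_tracker entries (this nova pod, and the libvirt pod earlier) encode a simple relation: podStartSLOduration is podStartE2EDuration minus the image-pull window, because pull time is excluded from the SLO figure. Checking the nova pod's numbers with the monotonic m=+ offsets copied from the entry above:

package main

import "fmt"

func main() {
	// Monotonic offsets (the m=+... values) copied from the
	// nova-edpm-deployment-openstack-edpm-ipam-9fpgv entry above.
	const (
		firstStartedPulling = 2661.578685177 // seconds since kubelet start
		lastFinishedPulling = 2662.403012146
		podStartE2E         = 3.494071955 // podStartE2EDuration
	)
	pullWindow := lastFinishedPulling - firstStartedPulling
	fmt.Printf("image pull window:   %.9fs\n", pullWindow)
	fmt.Printf("podStartSLOduration: %.9fs\n", podStartE2E-pullWindow) // ~2.669744986, as logged
}

The same arithmetic reproduces the libvirt pod's 1.623685368 SLO figure from its 2.075042217s end-to-end duration and 0.451356849s pull window.
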
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:23:50 crc kubenswrapper[4712]: I0131 06:23:50.505391 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:23:50 crc kubenswrapper[4712]: E0131 06:23:50.506675 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.330715 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cjn26"] Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.336450 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.399608 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjn26"] Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.412497 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm9nn\" (UniqueName: \"kubernetes.io/projected/368c7448-e142-4668-9e6d-668c716b1072-kube-api-access-tm9nn\") pod \"redhat-operators-cjn26\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.412732 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-catalog-content\") pod \"redhat-operators-cjn26\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.412772 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-utilities\") pod \"redhat-operators-cjn26\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.515304 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-catalog-content\") pod \"redhat-operators-cjn26\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.515386 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-utilities\") pod \"redhat-operators-cjn26\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.515455 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm9nn\" (UniqueName: 
\"kubernetes.io/projected/368c7448-e142-4668-9e6d-668c716b1072-kube-api-access-tm9nn\") pod \"redhat-operators-cjn26\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.515917 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-catalog-content\") pod \"redhat-operators-cjn26\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.515943 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-utilities\") pod \"redhat-operators-cjn26\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.544116 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm9nn\" (UniqueName: \"kubernetes.io/projected/368c7448-e142-4668-9e6d-668c716b1072-kube-api-access-tm9nn\") pod \"redhat-operators-cjn26\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.664591 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:23:53 crc kubenswrapper[4712]: I0131 06:23:53.997911 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjn26"] Jan 31 06:23:54 crc kubenswrapper[4712]: I0131 06:23:54.108532 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjn26" event={"ID":"368c7448-e142-4668-9e6d-668c716b1072","Type":"ContainerStarted","Data":"102015b23f86ff9a9b52998c5cb42391416f81cec13c448d102b540816c92aea"} Jan 31 06:23:56 crc kubenswrapper[4712]: I0131 06:23:56.134590 4712 generic.go:334] "Generic (PLEG): container finished" podID="368c7448-e142-4668-9e6d-668c716b1072" containerID="552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e" exitCode=0 Jan 31 06:23:56 crc kubenswrapper[4712]: I0131 06:23:56.134674 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjn26" event={"ID":"368c7448-e142-4668-9e6d-668c716b1072","Type":"ContainerDied","Data":"552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e"} Jan 31 06:23:59 crc kubenswrapper[4712]: I0131 06:23:59.164419 4712 generic.go:334] "Generic (PLEG): container finished" podID="368c7448-e142-4668-9e6d-668c716b1072" containerID="95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461" exitCode=0 Jan 31 06:23:59 crc kubenswrapper[4712]: I0131 06:23:59.164506 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjn26" event={"ID":"368c7448-e142-4668-9e6d-668c716b1072","Type":"ContainerDied","Data":"95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461"} Jan 31 06:24:01 crc kubenswrapper[4712]: I0131 06:24:01.504985 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:24:01 crc kubenswrapper[4712]: E0131 06:24:01.505943 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:24:03 crc kubenswrapper[4712]: I0131 06:24:03.209044 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjn26" event={"ID":"368c7448-e142-4668-9e6d-668c716b1072","Type":"ContainerStarted","Data":"a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a"} Jan 31 06:24:03 crc kubenswrapper[4712]: I0131 06:24:03.236627 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cjn26" podStartSLOduration=5.096283649 podStartE2EDuration="10.236600608s" podCreationTimestamp="2026-01-31 06:23:53 +0000 UTC" firstStartedPulling="2026-01-31 06:23:56.144637205 +0000 UTC m=+2702.238519066" lastFinishedPulling="2026-01-31 06:24:01.284954184 +0000 UTC m=+2707.378836025" observedRunningTime="2026-01-31 06:24:03.227628711 +0000 UTC m=+2709.321510562" watchObservedRunningTime="2026-01-31 06:24:03.236600608 +0000 UTC m=+2709.330482449" Jan 31 06:24:03 crc kubenswrapper[4712]: I0131 06:24:03.665132 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:24:03 crc kubenswrapper[4712]: I0131 06:24:03.665515 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:24:04 crc kubenswrapper[4712]: I0131 06:24:04.711768 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cjn26" podUID="368c7448-e142-4668-9e6d-668c716b1072" containerName="registry-server" probeResult="failure" output=< Jan 31 06:24:04 crc kubenswrapper[4712]: timeout: failed to connect service ":50051" within 1s Jan 31 06:24:04 crc kubenswrapper[4712]: > Jan 31 06:24:13 crc kubenswrapper[4712]: I0131 06:24:13.720521 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:24:13 crc kubenswrapper[4712]: I0131 06:24:13.776560 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:24:13 crc kubenswrapper[4712]: I0131 06:24:13.963238 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjn26"] Jan 31 06:24:15 crc kubenswrapper[4712]: I0131 06:24:15.315312 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cjn26" podUID="368c7448-e142-4668-9e6d-668c716b1072" containerName="registry-server" containerID="cri-o://a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a" gracePeriod=2 Jan 31 06:24:15 crc kubenswrapper[4712]: I0131 06:24:15.504835 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:24:15 crc kubenswrapper[4712]: I0131 06:24:15.803351 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:24:15 crc kubenswrapper[4712]: I0131 06:24:15.949138 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-catalog-content\") pod \"368c7448-e142-4668-9e6d-668c716b1072\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " Jan 31 06:24:15 crc kubenswrapper[4712]: I0131 06:24:15.949258 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm9nn\" (UniqueName: \"kubernetes.io/projected/368c7448-e142-4668-9e6d-668c716b1072-kube-api-access-tm9nn\") pod \"368c7448-e142-4668-9e6d-668c716b1072\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " Jan 31 06:24:15 crc kubenswrapper[4712]: I0131 06:24:15.949346 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-utilities\") pod \"368c7448-e142-4668-9e6d-668c716b1072\" (UID: \"368c7448-e142-4668-9e6d-668c716b1072\") " Jan 31 06:24:15 crc kubenswrapper[4712]: I0131 06:24:15.951002 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-utilities" (OuterVolumeSpecName: "utilities") pod "368c7448-e142-4668-9e6d-668c716b1072" (UID: "368c7448-e142-4668-9e6d-668c716b1072"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.021878 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/368c7448-e142-4668-9e6d-668c716b1072-kube-api-access-tm9nn" (OuterVolumeSpecName: "kube-api-access-tm9nn") pod "368c7448-e142-4668-9e6d-668c716b1072" (UID: "368c7448-e142-4668-9e6d-668c716b1072"). InnerVolumeSpecName "kube-api-access-tm9nn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.058073 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm9nn\" (UniqueName: \"kubernetes.io/projected/368c7448-e142-4668-9e6d-668c716b1072-kube-api-access-tm9nn\") on node \"crc\" DevicePath \"\"" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.058108 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.096444 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "368c7448-e142-4668-9e6d-668c716b1072" (UID: "368c7448-e142-4668-9e6d-668c716b1072"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.161420 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/368c7448-e142-4668-9e6d-668c716b1072-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.340075 4712 generic.go:334] "Generic (PLEG): container finished" podID="368c7448-e142-4668-9e6d-668c716b1072" containerID="a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a" exitCode=0 Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.340143 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjn26" event={"ID":"368c7448-e142-4668-9e6d-668c716b1072","Type":"ContainerDied","Data":"a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a"} Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.340155 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjn26" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.340233 4712 scope.go:117] "RemoveContainer" containerID="a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.340220 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjn26" event={"ID":"368c7448-e142-4668-9e6d-668c716b1072","Type":"ContainerDied","Data":"102015b23f86ff9a9b52998c5cb42391416f81cec13c448d102b540816c92aea"} Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.345828 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"bd0db9f1b9188f04f9a07e3bf278e214acd4d01c9434c6c269ba968f016c0683"} Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.377604 4712 scope.go:117] "RemoveContainer" containerID="95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.400537 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjn26"] Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.412127 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cjn26"] Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.413996 4712 scope.go:117] "RemoveContainer" containerID="552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.438808 4712 scope.go:117] "RemoveContainer" containerID="a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a" Jan 31 06:24:16 crc kubenswrapper[4712]: E0131 06:24:16.439515 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a\": container with ID starting with a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a not found: ID does not exist" containerID="a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.439559 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a"} err="failed to get container status 
\"a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a\": rpc error: code = NotFound desc = could not find container \"a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a\": container with ID starting with a53bc0f13c4d91899ad71df534a78789dede1c11c12979b8323e97a0d0786c0a not found: ID does not exist" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.439587 4712 scope.go:117] "RemoveContainer" containerID="95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461" Jan 31 06:24:16 crc kubenswrapper[4712]: E0131 06:24:16.440099 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461\": container with ID starting with 95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461 not found: ID does not exist" containerID="95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.440128 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461"} err="failed to get container status \"95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461\": rpc error: code = NotFound desc = could not find container \"95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461\": container with ID starting with 95136a4be0d8ac0c481ec20464df6ba27c024d482523638cd9457183cddc4461 not found: ID does not exist" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.440149 4712 scope.go:117] "RemoveContainer" containerID="552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e" Jan 31 06:24:16 crc kubenswrapper[4712]: E0131 06:24:16.440650 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e\": container with ID starting with 552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e not found: ID does not exist" containerID="552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.440679 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e"} err="failed to get container status \"552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e\": rpc error: code = NotFound desc = could not find container \"552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e\": container with ID starting with 552f6fcb7ba24e8f7a0fd6df2cb0a58e155a2a53baa65f56284a9b086b27df1e not found: ID does not exist" Jan 31 06:24:16 crc kubenswrapper[4712]: I0131 06:24:16.514632 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="368c7448-e142-4668-9e6d-668c716b1072" path="/var/lib/kubelet/pods/368c7448-e142-4668-9e6d-668c716b1072/volumes" Jan 31 06:25:20 crc kubenswrapper[4712]: I0131 06:25:20.943336 4712 generic.go:334] "Generic (PLEG): container finished" podID="293b2cc8-393a-4043-ac70-89b0a519de4b" containerID="3ff9932b53a4daff4de33085d8971579815f77328d7112915916ab221cdb4c40" exitCode=0 Jan 31 06:25:20 crc kubenswrapper[4712]: I0131 06:25:20.943626 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" 
event={"ID":"293b2cc8-393a-4043-ac70-89b0a519de4b","Type":"ContainerDied","Data":"3ff9932b53a4daff4de33085d8971579815f77328d7112915916ab221cdb4c40"} Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.397980 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.483241 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-ssh-key-openstack-edpm-ipam\") pod \"293b2cc8-393a-4043-ac70-89b0a519de4b\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.483315 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-combined-ca-bundle\") pod \"293b2cc8-393a-4043-ac70-89b0a519de4b\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.483338 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-inventory\") pod \"293b2cc8-393a-4043-ac70-89b0a519de4b\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.483403 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t9q9\" (UniqueName: \"kubernetes.io/projected/293b2cc8-393a-4043-ac70-89b0a519de4b-kube-api-access-4t9q9\") pod \"293b2cc8-393a-4043-ac70-89b0a519de4b\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.483431 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-0\") pod \"293b2cc8-393a-4043-ac70-89b0a519de4b\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.483612 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-1\") pod \"293b2cc8-393a-4043-ac70-89b0a519de4b\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.483680 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-1\") pod \"293b2cc8-393a-4043-ac70-89b0a519de4b\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.483708 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-0\") pod \"293b2cc8-393a-4043-ac70-89b0a519de4b\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.483734 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-extra-config-0\") pod \"293b2cc8-393a-4043-ac70-89b0a519de4b\" (UID: \"293b2cc8-393a-4043-ac70-89b0a519de4b\") " Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.494044 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/293b2cc8-393a-4043-ac70-89b0a519de4b-kube-api-access-4t9q9" (OuterVolumeSpecName: "kube-api-access-4t9q9") pod "293b2cc8-393a-4043-ac70-89b0a519de4b" (UID: "293b2cc8-393a-4043-ac70-89b0a519de4b"). InnerVolumeSpecName "kube-api-access-4t9q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.507382 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "293b2cc8-393a-4043-ac70-89b0a519de4b" (UID: "293b2cc8-393a-4043-ac70-89b0a519de4b"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.516503 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "293b2cc8-393a-4043-ac70-89b0a519de4b" (UID: "293b2cc8-393a-4043-ac70-89b0a519de4b"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.516521 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "293b2cc8-393a-4043-ac70-89b0a519de4b" (UID: "293b2cc8-393a-4043-ac70-89b0a519de4b"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.530467 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "293b2cc8-393a-4043-ac70-89b0a519de4b" (UID: "293b2cc8-393a-4043-ac70-89b0a519de4b"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.530877 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-inventory" (OuterVolumeSpecName: "inventory") pod "293b2cc8-393a-4043-ac70-89b0a519de4b" (UID: "293b2cc8-393a-4043-ac70-89b0a519de4b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.531083 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "293b2cc8-393a-4043-ac70-89b0a519de4b" (UID: "293b2cc8-393a-4043-ac70-89b0a519de4b"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.541524 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "293b2cc8-393a-4043-ac70-89b0a519de4b" (UID: "293b2cc8-393a-4043-ac70-89b0a519de4b"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.556807 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "293b2cc8-393a-4043-ac70-89b0a519de4b" (UID: "293b2cc8-393a-4043-ac70-89b0a519de4b"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.587071 4712 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.587116 4712 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.587134 4712 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.587145 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.587157 4712 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.587191 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-inventory\") on node \"crc\" DevicePath \"\"" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.587208 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t9q9\" (UniqueName: \"kubernetes.io/projected/293b2cc8-393a-4043-ac70-89b0a519de4b-kube-api-access-4t9q9\") on node \"crc\" DevicePath \"\"" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.587221 4712 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.587233 4712 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/293b2cc8-393a-4043-ac70-89b0a519de4b-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 31 06:25:22 crc 
kubenswrapper[4712]: I0131 06:25:22.966984 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" event={"ID":"293b2cc8-393a-4043-ac70-89b0a519de4b","Type":"ContainerDied","Data":"698f84f53c4ed815bd18aa96226b10cb893f6b9dc7bd63e4f53ee6fd06de9ef0"} Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.967041 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="698f84f53c4ed815bd18aa96226b10cb893f6b9dc7bd63e4f53ee6fd06de9ef0" Jan 31 06:25:22 crc kubenswrapper[4712]: I0131 06:25:22.967128 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fpgv" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.053819 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf"] Jan 31 06:25:23 crc kubenswrapper[4712]: E0131 06:25:23.065752 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="368c7448-e142-4668-9e6d-668c716b1072" containerName="extract-content" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.065800 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="368c7448-e142-4668-9e6d-668c716b1072" containerName="extract-content" Jan 31 06:25:23 crc kubenswrapper[4712]: E0131 06:25:23.065815 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="293b2cc8-393a-4043-ac70-89b0a519de4b" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.065824 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="293b2cc8-393a-4043-ac70-89b0a519de4b" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jan 31 06:25:23 crc kubenswrapper[4712]: E0131 06:25:23.065838 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="368c7448-e142-4668-9e6d-668c716b1072" containerName="extract-utilities" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.065844 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="368c7448-e142-4668-9e6d-668c716b1072" containerName="extract-utilities" Jan 31 06:25:23 crc kubenswrapper[4712]: E0131 06:25:23.065866 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="368c7448-e142-4668-9e6d-668c716b1072" containerName="registry-server" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.065871 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="368c7448-e142-4668-9e6d-668c716b1072" containerName="registry-server" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.066205 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="368c7448-e142-4668-9e6d-668c716b1072" containerName="registry-server" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.066228 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="293b2cc8-393a-4043-ac70-89b0a519de4b" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.066857 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf"] Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.066961 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.071639 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-5xfzr" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.072108 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.074507 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.074660 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.074822 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.201417 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.201747 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.201775 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szkqf\" (UniqueName: \"kubernetes.io/projected/467fe339-07fd-4c51-95df-4c8c123e2c03-kube-api-access-szkqf\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.201821 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.201939 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.201971 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.201998 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.319007 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.319121 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.319196 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.319275 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.319359 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.319400 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szkqf\" (UniqueName: \"kubernetes.io/projected/467fe339-07fd-4c51-95df-4c8c123e2c03-kube-api-access-szkqf\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.319521 
4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.324723 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.327786 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.328234 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.328954 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.330957 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.331460 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.345592 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szkqf\" (UniqueName: \"kubernetes.io/projected/467fe339-07fd-4c51-95df-4c8c123e2c03-kube-api-access-szkqf\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 
06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.390548 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.913040 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf"] Jan 31 06:25:23 crc kubenswrapper[4712]: I0131 06:25:23.978200 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" event={"ID":"467fe339-07fd-4c51-95df-4c8c123e2c03","Type":"ContainerStarted","Data":"6f8ba929cdeb5cb82b775d5606b3c3e797568e49ae607f3f5de3e09d4eca3c45"} Jan 31 06:25:27 crc kubenswrapper[4712]: I0131 06:25:27.008538 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" event={"ID":"467fe339-07fd-4c51-95df-4c8c123e2c03","Type":"ContainerStarted","Data":"68feca2fc2e75dbc1a25510bf9eab21e40f0e1fa7774cd02a46ddb100537f413"} Jan 31 06:25:27 crc kubenswrapper[4712]: I0131 06:25:27.040918 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" podStartSLOduration=2.12719128 podStartE2EDuration="4.040897305s" podCreationTimestamp="2026-01-31 06:25:23 +0000 UTC" firstStartedPulling="2026-01-31 06:25:23.916560079 +0000 UTC m=+2790.010441920" lastFinishedPulling="2026-01-31 06:25:25.830266104 +0000 UTC m=+2791.924147945" observedRunningTime="2026-01-31 06:25:27.031406036 +0000 UTC m=+2793.125287897" watchObservedRunningTime="2026-01-31 06:25:27.040897305 +0000 UTC m=+2793.134779146" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.601866 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qqjg9"] Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.605156 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.623323 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qqjg9"] Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.725666 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-utilities\") pod \"certified-operators-qqjg9\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.725878 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psgtp\" (UniqueName: \"kubernetes.io/projected/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-kube-api-access-psgtp\") pod \"certified-operators-qqjg9\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.726079 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-catalog-content\") pod \"certified-operators-qqjg9\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.828264 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-utilities\") pod \"certified-operators-qqjg9\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.828397 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psgtp\" (UniqueName: \"kubernetes.io/projected/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-kube-api-access-psgtp\") pod \"certified-operators-qqjg9\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.828479 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-catalog-content\") pod \"certified-operators-qqjg9\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.829059 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-catalog-content\") pod \"certified-operators-qqjg9\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.829346 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-utilities\") pod \"certified-operators-qqjg9\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.852041 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-psgtp\" (UniqueName: \"kubernetes.io/projected/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-kube-api-access-psgtp\") pod \"certified-operators-qqjg9\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:02 crc kubenswrapper[4712]: I0131 06:26:02.977376 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:03 crc kubenswrapper[4712]: I0131 06:26:03.554456 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qqjg9"] Jan 31 06:26:04 crc kubenswrapper[4712]: I0131 06:26:04.368860 4712 generic.go:334] "Generic (PLEG): container finished" podID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerID="16b44716d2221752f6d12b68bd91b6bf3df66e223cee167e12980fdf56b3c820" exitCode=0 Jan 31 06:26:04 crc kubenswrapper[4712]: I0131 06:26:04.369403 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qqjg9" event={"ID":"28d9e8fe-e1c3-4ef9-bd45-0011a067b422","Type":"ContainerDied","Data":"16b44716d2221752f6d12b68bd91b6bf3df66e223cee167e12980fdf56b3c820"} Jan 31 06:26:04 crc kubenswrapper[4712]: I0131 06:26:04.369459 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qqjg9" event={"ID":"28d9e8fe-e1c3-4ef9-bd45-0011a067b422","Type":"ContainerStarted","Data":"dee49bec3f24fb3a3dfa84ad3ac30c9c34ee27628560fe8ab4d92288d1e38465"} Jan 31 06:26:06 crc kubenswrapper[4712]: I0131 06:26:06.389127 4712 generic.go:334] "Generic (PLEG): container finished" podID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerID="8e1af3df876b66411cf95da006caa3cb53c1bc9927d7a51dbe5155d9d229e0f3" exitCode=0 Jan 31 06:26:06 crc kubenswrapper[4712]: I0131 06:26:06.389218 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qqjg9" event={"ID":"28d9e8fe-e1c3-4ef9-bd45-0011a067b422","Type":"ContainerDied","Data":"8e1af3df876b66411cf95da006caa3cb53c1bc9927d7a51dbe5155d9d229e0f3"} Jan 31 06:26:08 crc kubenswrapper[4712]: I0131 06:26:08.410532 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qqjg9" event={"ID":"28d9e8fe-e1c3-4ef9-bd45-0011a067b422","Type":"ContainerStarted","Data":"e58c57d6ddbd9ae97d68080001d082d81c739de08fce369c4f228202e38eb753"} Jan 31 06:26:09 crc kubenswrapper[4712]: I0131 06:26:09.472097 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qqjg9" podStartSLOduration=3.933507169 podStartE2EDuration="7.472065426s" podCreationTimestamp="2026-01-31 06:26:02 +0000 UTC" firstStartedPulling="2026-01-31 06:26:04.372054923 +0000 UTC m=+2830.465936774" lastFinishedPulling="2026-01-31 06:26:07.91061319 +0000 UTC m=+2834.004495031" observedRunningTime="2026-01-31 06:26:09.455888095 +0000 UTC m=+2835.549769956" watchObservedRunningTime="2026-01-31 06:26:09.472065426 +0000 UTC m=+2835.565947277" Jan 31 06:26:12 crc kubenswrapper[4712]: I0131 06:26:12.978876 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:12 crc kubenswrapper[4712]: I0131 06:26:12.979225 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:13 crc kubenswrapper[4712]: I0131 06:26:13.031078 4712 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:13 crc kubenswrapper[4712]: I0131 06:26:13.511092 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:13 crc kubenswrapper[4712]: I0131 06:26:13.568011 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qqjg9"] Jan 31 06:26:15 crc kubenswrapper[4712]: I0131 06:26:15.474164 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qqjg9" podUID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerName="registry-server" containerID="cri-o://e58c57d6ddbd9ae97d68080001d082d81c739de08fce369c4f228202e38eb753" gracePeriod=2 Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.484130 4712 generic.go:334] "Generic (PLEG): container finished" podID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerID="e58c57d6ddbd9ae97d68080001d082d81c739de08fce369c4f228202e38eb753" exitCode=0 Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.484336 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qqjg9" event={"ID":"28d9e8fe-e1c3-4ef9-bd45-0011a067b422","Type":"ContainerDied","Data":"e58c57d6ddbd9ae97d68080001d082d81c739de08fce369c4f228202e38eb753"} Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.719122 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.816789 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psgtp\" (UniqueName: \"kubernetes.io/projected/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-kube-api-access-psgtp\") pod \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.816913 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-utilities\") pod \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.817046 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-catalog-content\") pod \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\" (UID: \"28d9e8fe-e1c3-4ef9-bd45-0011a067b422\") " Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.817718 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-utilities" (OuterVolumeSpecName: "utilities") pod "28d9e8fe-e1c3-4ef9-bd45-0011a067b422" (UID: "28d9e8fe-e1c3-4ef9-bd45-0011a067b422"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.823409 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-kube-api-access-psgtp" (OuterVolumeSpecName: "kube-api-access-psgtp") pod "28d9e8fe-e1c3-4ef9-bd45-0011a067b422" (UID: "28d9e8fe-e1c3-4ef9-bd45-0011a067b422"). InnerVolumeSpecName "kube-api-access-psgtp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.920165 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:26:16 crc kubenswrapper[4712]: I0131 06:26:16.920229 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psgtp\" (UniqueName: \"kubernetes.io/projected/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-kube-api-access-psgtp\") on node \"crc\" DevicePath \"\"" Jan 31 06:26:17 crc kubenswrapper[4712]: I0131 06:26:17.500481 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qqjg9" event={"ID":"28d9e8fe-e1c3-4ef9-bd45-0011a067b422","Type":"ContainerDied","Data":"dee49bec3f24fb3a3dfa84ad3ac30c9c34ee27628560fe8ab4d92288d1e38465"} Jan 31 06:26:17 crc kubenswrapper[4712]: I0131 06:26:17.500543 4712 scope.go:117] "RemoveContainer" containerID="e58c57d6ddbd9ae97d68080001d082d81c739de08fce369c4f228202e38eb753" Jan 31 06:26:17 crc kubenswrapper[4712]: I0131 06:26:17.500598 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qqjg9" Jan 31 06:26:17 crc kubenswrapper[4712]: I0131 06:26:17.532157 4712 scope.go:117] "RemoveContainer" containerID="8e1af3df876b66411cf95da006caa3cb53c1bc9927d7a51dbe5155d9d229e0f3" Jan 31 06:26:17 crc kubenswrapper[4712]: I0131 06:26:17.564508 4712 scope.go:117] "RemoveContainer" containerID="16b44716d2221752f6d12b68bd91b6bf3df66e223cee167e12980fdf56b3c820" Jan 31 06:26:18 crc kubenswrapper[4712]: I0131 06:26:18.370813 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28d9e8fe-e1c3-4ef9-bd45-0011a067b422" (UID: "28d9e8fe-e1c3-4ef9-bd45-0011a067b422"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:26:18 crc kubenswrapper[4712]: I0131 06:26:18.437055 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qqjg9"] Jan 31 06:26:18 crc kubenswrapper[4712]: I0131 06:26:18.448690 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qqjg9"] Jan 31 06:26:18 crc kubenswrapper[4712]: I0131 06:26:18.458077 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d9e8fe-e1c3-4ef9-bd45-0011a067b422-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:26:18 crc kubenswrapper[4712]: I0131 06:26:18.514971 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" path="/var/lib/kubelet/pods/28d9e8fe-e1c3-4ef9-bd45-0011a067b422/volumes" Jan 31 06:26:42 crc kubenswrapper[4712]: I0131 06:26:42.497291 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:26:42 crc kubenswrapper[4712]: I0131 06:26:42.497912 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:27:12 crc kubenswrapper[4712]: I0131 06:27:12.497081 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:27:12 crc kubenswrapper[4712]: I0131 06:27:12.497717 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:27:42 crc kubenswrapper[4712]: I0131 06:27:42.497936 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:27:42 crc kubenswrapper[4712]: I0131 06:27:42.498518 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:27:42 crc kubenswrapper[4712]: I0131 06:27:42.498565 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 06:27:42 crc kubenswrapper[4712]: I0131 06:27:42.499440 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"bd0db9f1b9188f04f9a07e3bf278e214acd4d01c9434c6c269ba968f016c0683"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 06:27:42 crc kubenswrapper[4712]: I0131 06:27:42.499493 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://bd0db9f1b9188f04f9a07e3bf278e214acd4d01c9434c6c269ba968f016c0683" gracePeriod=600 Jan 31 06:27:43 crc kubenswrapper[4712]: I0131 06:27:43.320234 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="bd0db9f1b9188f04f9a07e3bf278e214acd4d01c9434c6c269ba968f016c0683" exitCode=0 Jan 31 06:27:43 crc kubenswrapper[4712]: I0131 06:27:43.320314 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"bd0db9f1b9188f04f9a07e3bf278e214acd4d01c9434c6c269ba968f016c0683"} Jan 31 06:27:43 crc kubenswrapper[4712]: I0131 06:27:43.320655 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee"} Jan 31 06:27:43 crc kubenswrapper[4712]: I0131 06:27:43.320687 4712 scope.go:117] "RemoveContainer" containerID="cf9098132a4f91acfa45db725d6a5d7acf4934e361f6f4b783173b4e8e8a6785" Jan 31 06:27:49 crc kubenswrapper[4712]: I0131 06:27:49.386196 4712 generic.go:334] "Generic (PLEG): container finished" podID="467fe339-07fd-4c51-95df-4c8c123e2c03" containerID="68feca2fc2e75dbc1a25510bf9eab21e40f0e1fa7774cd02a46ddb100537f413" exitCode=0 Jan 31 06:27:49 crc kubenswrapper[4712]: I0131 06:27:49.386231 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" event={"ID":"467fe339-07fd-4c51-95df-4c8c123e2c03","Type":"ContainerDied","Data":"68feca2fc2e75dbc1a25510bf9eab21e40f0e1fa7774cd02a46ddb100537f413"} Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.778047 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.909159 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-telemetry-combined-ca-bundle\") pod \"467fe339-07fd-4c51-95df-4c8c123e2c03\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") "
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.909287 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szkqf\" (UniqueName: \"kubernetes.io/projected/467fe339-07fd-4c51-95df-4c8c123e2c03-kube-api-access-szkqf\") pod \"467fe339-07fd-4c51-95df-4c8c123e2c03\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") "
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.909340 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-inventory\") pod \"467fe339-07fd-4c51-95df-4c8c123e2c03\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") "
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.909484 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-1\") pod \"467fe339-07fd-4c51-95df-4c8c123e2c03\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") "
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.909609 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-2\") pod \"467fe339-07fd-4c51-95df-4c8c123e2c03\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") "
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.909744 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-0\") pod \"467fe339-07fd-4c51-95df-4c8c123e2c03\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") "
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.909783 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ssh-key-openstack-edpm-ipam\") pod \"467fe339-07fd-4c51-95df-4c8c123e2c03\" (UID: \"467fe339-07fd-4c51-95df-4c8c123e2c03\") "
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.917516 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "467fe339-07fd-4c51-95df-4c8c123e2c03" (UID: "467fe339-07fd-4c51-95df-4c8c123e2c03"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.920402 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/467fe339-07fd-4c51-95df-4c8c123e2c03-kube-api-access-szkqf" (OuterVolumeSpecName: "kube-api-access-szkqf") pod "467fe339-07fd-4c51-95df-4c8c123e2c03" (UID: "467fe339-07fd-4c51-95df-4c8c123e2c03"). InnerVolumeSpecName "kube-api-access-szkqf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.941928 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "467fe339-07fd-4c51-95df-4c8c123e2c03" (UID: "467fe339-07fd-4c51-95df-4c8c123e2c03"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.945831 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "467fe339-07fd-4c51-95df-4c8c123e2c03" (UID: "467fe339-07fd-4c51-95df-4c8c123e2c03"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.949741 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "467fe339-07fd-4c51-95df-4c8c123e2c03" (UID: "467fe339-07fd-4c51-95df-4c8c123e2c03"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.950278 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-inventory" (OuterVolumeSpecName: "inventory") pod "467fe339-07fd-4c51-95df-4c8c123e2c03" (UID: "467fe339-07fd-4c51-95df-4c8c123e2c03"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:27:50 crc kubenswrapper[4712]: I0131 06:27:50.953893 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "467fe339-07fd-4c51-95df-4c8c123e2c03" (UID: "467fe339-07fd-4c51-95df-4c8c123e2c03"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.013343 4712 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\""
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.013402 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.013418 4712 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.013429 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szkqf\" (UniqueName: \"kubernetes.io/projected/467fe339-07fd-4c51-95df-4c8c123e2c03-kube-api-access-szkqf\") on node \"crc\" DevicePath \"\""
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.013440 4712 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-inventory\") on node \"crc\" DevicePath \"\""
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.013451 4712 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\""
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.013464 4712 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/467fe339-07fd-4c51-95df-4c8c123e2c03-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\""
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.414280 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf" event={"ID":"467fe339-07fd-4c51-95df-4c8c123e2c03","Type":"ContainerDied","Data":"6f8ba929cdeb5cb82b775d5606b3c3e797568e49ae607f3f5de3e09d4eca3c45"}
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.414554 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f8ba929cdeb5cb82b775d5606b3c3e797568e49ae607f3f5de3e09d4eca3c45"
Jan 31 06:27:51 crc kubenswrapper[4712]: I0131 06:27:51.414386 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.475122 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lmhzg"]
Jan 31 06:28:16 crc kubenswrapper[4712]: E0131 06:28:16.476216 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerName="extract-content"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.476243 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerName="extract-content"
Jan 31 06:28:16 crc kubenswrapper[4712]: E0131 06:28:16.476273 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerName="extract-utilities"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.476281 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerName="extract-utilities"
Jan 31 06:28:16 crc kubenswrapper[4712]: E0131 06:28:16.476294 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerName="registry-server"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.476304 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerName="registry-server"
Jan 31 06:28:16 crc kubenswrapper[4712]: E0131 06:28:16.476329 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="467fe339-07fd-4c51-95df-4c8c123e2c03" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.476340 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="467fe339-07fd-4c51-95df-4c8c123e2c03" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.476560 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="467fe339-07fd-4c51-95df-4c8c123e2c03" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.476589 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="28d9e8fe-e1c3-4ef9-bd45-0011a067b422" containerName="registry-server"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.478399 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.495031 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lmhzg"]
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.569433 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwjpc\" (UniqueName: \"kubernetes.io/projected/44d8a918-0165-4bec-9f59-bf1e7de47746-kube-api-access-fwjpc\") pod \"community-operators-lmhzg\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") " pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.569524 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-catalog-content\") pod \"community-operators-lmhzg\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") " pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.569711 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-utilities\") pod \"community-operators-lmhzg\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") " pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.671531 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-utilities\") pod \"community-operators-lmhzg\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") " pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.671663 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwjpc\" (UniqueName: \"kubernetes.io/projected/44d8a918-0165-4bec-9f59-bf1e7de47746-kube-api-access-fwjpc\") pod \"community-operators-lmhzg\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") " pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.671706 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-catalog-content\") pod \"community-operators-lmhzg\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") " pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.672295 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-catalog-content\") pod \"community-operators-lmhzg\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") " pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.672648 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-utilities\") pod \"community-operators-lmhzg\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") " pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.696700 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwjpc\" (UniqueName: \"kubernetes.io/projected/44d8a918-0165-4bec-9f59-bf1e7de47746-kube-api-access-fwjpc\") pod \"community-operators-lmhzg\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") " pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:16 crc kubenswrapper[4712]: I0131 06:28:16.813590 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.435295 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lmhzg"]
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.479939 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-njw4n"]
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.482102 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.496490 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-njw4n"]
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.591779 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-catalog-content\") pod \"redhat-marketplace-njw4n\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") " pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.592292 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-utilities\") pod \"redhat-marketplace-njw4n\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") " pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.592381 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhvl2\" (UniqueName: \"kubernetes.io/projected/3273d4a9-226e-4f82-ad32-55b1e724f202-kube-api-access-nhvl2\") pod \"redhat-marketplace-njw4n\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") " pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.651559 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lmhzg" event={"ID":"44d8a918-0165-4bec-9f59-bf1e7de47746","Type":"ContainerStarted","Data":"6b0307c2eb5e6a11d9dda460d89957976a6b475c3630b5a24bd934e45478ad75"}
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.694291 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhvl2\" (UniqueName: \"kubernetes.io/projected/3273d4a9-226e-4f82-ad32-55b1e724f202-kube-api-access-nhvl2\") pod \"redhat-marketplace-njw4n\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") " pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.694423 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-catalog-content\") pod \"redhat-marketplace-njw4n\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") " pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.694498 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-utilities\") pod \"redhat-marketplace-njw4n\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") " pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.695038 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-utilities\") pod \"redhat-marketplace-njw4n\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") " pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.695477 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-catalog-content\") pod \"redhat-marketplace-njw4n\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") " pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.723351 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhvl2\" (UniqueName: \"kubernetes.io/projected/3273d4a9-226e-4f82-ad32-55b1e724f202-kube-api-access-nhvl2\") pod \"redhat-marketplace-njw4n\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") " pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:17 crc kubenswrapper[4712]: I0131 06:28:17.819889 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:18 crc kubenswrapper[4712]: I0131 06:28:18.312481 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-njw4n"]
Jan 31 06:28:18 crc kubenswrapper[4712]: W0131 06:28:18.323039 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3273d4a9_226e_4f82_ad32_55b1e724f202.slice/crio-51972d3fa12f36663b94faa6cd31da10eefb7f8d23ec8471c1b382825a1c6395 WatchSource:0}: Error finding container 51972d3fa12f36663b94faa6cd31da10eefb7f8d23ec8471c1b382825a1c6395: Status 404 returned error can't find the container with id 51972d3fa12f36663b94faa6cd31da10eefb7f8d23ec8471c1b382825a1c6395
Jan 31 06:28:18 crc kubenswrapper[4712]: I0131 06:28:18.663688 4712 generic.go:334] "Generic (PLEG): container finished" podID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerID="93a6e1ec9aee077e34bf03a6b81c2f101b46d76d195f172c039bc314af3ee5ce" exitCode=0
Jan 31 06:28:18 crc kubenswrapper[4712]: I0131 06:28:18.663794 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lmhzg" event={"ID":"44d8a918-0165-4bec-9f59-bf1e7de47746","Type":"ContainerDied","Data":"93a6e1ec9aee077e34bf03a6b81c2f101b46d76d195f172c039bc314af3ee5ce"}
Jan 31 06:28:18 crc kubenswrapper[4712]: I0131 06:28:18.665477 4712 generic.go:334] "Generic (PLEG): container finished" podID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerID="531b5ef6a9e8f870e3512efa5942fc2ca3a1d96809841766d186583bf5977418" exitCode=0
Jan 31 06:28:18 crc kubenswrapper[4712]: I0131 06:28:18.665529 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-njw4n" event={"ID":"3273d4a9-226e-4f82-ad32-55b1e724f202","Type":"ContainerDied","Data":"531b5ef6a9e8f870e3512efa5942fc2ca3a1d96809841766d186583bf5977418"}
Jan 31 06:28:18 crc kubenswrapper[4712]: I0131 06:28:18.665584 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-njw4n" event={"ID":"3273d4a9-226e-4f82-ad32-55b1e724f202","Type":"ContainerStarted","Data":"51972d3fa12f36663b94faa6cd31da10eefb7f8d23ec8471c1b382825a1c6395"}
Jan 31 06:28:18 crc kubenswrapper[4712]: I0131 06:28:18.666519 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 31 06:28:20 crc kubenswrapper[4712]: I0131 06:28:20.685795 4712 generic.go:334] "Generic (PLEG): container finished" podID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerID="f37efa13fbac7ffbe446870019161e5bf5c5722f929b49d5aead82423277b6c5" exitCode=0
Jan 31 06:28:20 crc kubenswrapper[4712]: I0131 06:28:20.685922 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lmhzg" event={"ID":"44d8a918-0165-4bec-9f59-bf1e7de47746","Type":"ContainerDied","Data":"f37efa13fbac7ffbe446870019161e5bf5c5722f929b49d5aead82423277b6c5"}
Jan 31 06:28:20 crc kubenswrapper[4712]: I0131 06:28:20.690024 4712 generic.go:334] "Generic (PLEG): container finished" podID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerID="64ce00f40f4809fad6c020b35a4cc55f4d06bce4ec7a0d5ad4729902e131b1e9" exitCode=0
Jan 31 06:28:20 crc kubenswrapper[4712]: I0131 06:28:20.690063 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-njw4n" event={"ID":"3273d4a9-226e-4f82-ad32-55b1e724f202","Type":"ContainerDied","Data":"64ce00f40f4809fad6c020b35a4cc55f4d06bce4ec7a0d5ad4729902e131b1e9"}
Jan 31 06:28:22 crc kubenswrapper[4712]: I0131 06:28:22.712371 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-njw4n" event={"ID":"3273d4a9-226e-4f82-ad32-55b1e724f202","Type":"ContainerStarted","Data":"fe20de992bccb837fa2878b4c35953cd486963f0a19b9ad05e15f7b6ab756ea9"}
Jan 31 06:28:22 crc kubenswrapper[4712]: I0131 06:28:22.784306 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-njw4n" podStartSLOduration=2.382726023 podStartE2EDuration="5.784278623s" podCreationTimestamp="2026-01-31 06:28:17 +0000 UTC" firstStartedPulling="2026-01-31 06:28:18.667277122 +0000 UTC m=+2964.761158963" lastFinishedPulling="2026-01-31 06:28:22.068829722 +0000 UTC m=+2968.162711563" observedRunningTime="2026-01-31 06:28:22.774427464 +0000 UTC m=+2968.868309325" watchObservedRunningTime="2026-01-31 06:28:22.784278623 +0000 UTC m=+2968.878160464"
Jan 31 06:28:23 crc kubenswrapper[4712]: I0131 06:28:23.724851 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lmhzg" event={"ID":"44d8a918-0165-4bec-9f59-bf1e7de47746","Type":"ContainerStarted","Data":"b72770e6ca2af04dd6f8cd533270d8f5cb2efa68bd07b65df272cac0b837268a"}
Jan 31 06:28:23 crc kubenswrapper[4712]: I0131 06:28:23.750494 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lmhzg" podStartSLOduration=3.366938211 podStartE2EDuration="7.750470793s" podCreationTimestamp="2026-01-31 06:28:16 +0000 UTC" firstStartedPulling="2026-01-31 06:28:18.666143185 +0000 UTC m=+2964.760025026" lastFinishedPulling="2026-01-31 06:28:23.049675757 +0000 UTC m=+2969.143557608" observedRunningTime="2026-01-31 06:28:23.742586591 +0000 UTC m=+2969.836468432" watchObservedRunningTime="2026-01-31 06:28:23.750470793 +0000 UTC m=+2969.844352634"
Jan 31 06:28:26 crc kubenswrapper[4712]: I0131 06:28:26.814734 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:26 crc kubenswrapper[4712]: I0131 06:28:26.815298 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:26 crc kubenswrapper[4712]: I0131 06:28:26.873987 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:27 crc kubenswrapper[4712]: I0131 06:28:27.823942 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:27 crc kubenswrapper[4712]: I0131 06:28:27.824004 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:27 crc kubenswrapper[4712]: I0131 06:28:27.883805 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:28 crc kubenswrapper[4712]: I0131 06:28:28.825223 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:29 crc kubenswrapper[4712]: I0131 06:28:29.261206 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-njw4n"]
Jan 31 06:28:30 crc kubenswrapper[4712]: I0131 06:28:30.792727 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-njw4n" podUID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerName="registry-server" containerID="cri-o://fe20de992bccb837fa2878b4c35953cd486963f0a19b9ad05e15f7b6ab756ea9" gracePeriod=2
Jan 31 06:28:31 crc kubenswrapper[4712]: I0131 06:28:31.804007 4712 generic.go:334] "Generic (PLEG): container finished" podID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerID="fe20de992bccb837fa2878b4c35953cd486963f0a19b9ad05e15f7b6ab756ea9" exitCode=0
Jan 31 06:28:31 crc kubenswrapper[4712]: I0131 06:28:31.804050 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-njw4n" event={"ID":"3273d4a9-226e-4f82-ad32-55b1e724f202","Type":"ContainerDied","Data":"fe20de992bccb837fa2878b4c35953cd486963f0a19b9ad05e15f7b6ab756ea9"}
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.157684 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.242107 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-catalog-content\") pod \"3273d4a9-226e-4f82-ad32-55b1e724f202\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") "
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.242231 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhvl2\" (UniqueName: \"kubernetes.io/projected/3273d4a9-226e-4f82-ad32-55b1e724f202-kube-api-access-nhvl2\") pod \"3273d4a9-226e-4f82-ad32-55b1e724f202\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") "
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.242396 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-utilities\") pod \"3273d4a9-226e-4f82-ad32-55b1e724f202\" (UID: \"3273d4a9-226e-4f82-ad32-55b1e724f202\") "
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.244042 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-utilities" (OuterVolumeSpecName: "utilities") pod "3273d4a9-226e-4f82-ad32-55b1e724f202" (UID: "3273d4a9-226e-4f82-ad32-55b1e724f202"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.253213 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3273d4a9-226e-4f82-ad32-55b1e724f202-kube-api-access-nhvl2" (OuterVolumeSpecName: "kube-api-access-nhvl2") pod "3273d4a9-226e-4f82-ad32-55b1e724f202" (UID: "3273d4a9-226e-4f82-ad32-55b1e724f202"). InnerVolumeSpecName "kube-api-access-nhvl2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.281499 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3273d4a9-226e-4f82-ad32-55b1e724f202" (UID: "3273d4a9-226e-4f82-ad32-55b1e724f202"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.345318 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-utilities\") on node \"crc\" DevicePath \"\""
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.345354 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3273d4a9-226e-4f82-ad32-55b1e724f202-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.345368 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhvl2\" (UniqueName: \"kubernetes.io/projected/3273d4a9-226e-4f82-ad32-55b1e724f202-kube-api-access-nhvl2\") on node \"crc\" DevicePath \"\""
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.814974 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-njw4n" event={"ID":"3273d4a9-226e-4f82-ad32-55b1e724f202","Type":"ContainerDied","Data":"51972d3fa12f36663b94faa6cd31da10eefb7f8d23ec8471c1b382825a1c6395"}
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.815032 4712 scope.go:117] "RemoveContainer" containerID="fe20de992bccb837fa2878b4c35953cd486963f0a19b9ad05e15f7b6ab756ea9"
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.815052 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-njw4n"
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.840367 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-njw4n"]
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.844350 4712 scope.go:117] "RemoveContainer" containerID="64ce00f40f4809fad6c020b35a4cc55f4d06bce4ec7a0d5ad4729902e131b1e9"
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.851935 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-njw4n"]
Jan 31 06:28:32 crc kubenswrapper[4712]: I0131 06:28:32.866396 4712 scope.go:117] "RemoveContainer" containerID="531b5ef6a9e8f870e3512efa5942fc2ca3a1d96809841766d186583bf5977418"
Jan 31 06:28:34 crc kubenswrapper[4712]: I0131 06:28:34.515827 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3273d4a9-226e-4f82-ad32-55b1e724f202" path="/var/lib/kubelet/pods/3273d4a9-226e-4f82-ad32-55b1e724f202/volumes"
Jan 31 06:28:36 crc kubenswrapper[4712]: I0131 06:28:36.875293 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:36 crc kubenswrapper[4712]: I0131 06:28:36.932221 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lmhzg"]
Jan 31 06:28:37 crc kubenswrapper[4712]: I0131 06:28:37.864842 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lmhzg" podUID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerName="registry-server" containerID="cri-o://b72770e6ca2af04dd6f8cd533270d8f5cb2efa68bd07b65df272cac0b837268a" gracePeriod=2
Jan 31 06:28:38 crc kubenswrapper[4712]: E0131 06:28:38.115482 4712 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44d8a918_0165_4bec_9f59_bf1e7de47746.slice/crio-b72770e6ca2af04dd6f8cd533270d8f5cb2efa68bd07b65df272cac0b837268a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44d8a918_0165_4bec_9f59_bf1e7de47746.slice/crio-conmon-b72770e6ca2af04dd6f8cd533270d8f5cb2efa68bd07b65df272cac0b837268a.scope\": RecentStats: unable to find data in memory cache]"
Jan 31 06:28:38 crc kubenswrapper[4712]: I0131 06:28:38.876685 4712 generic.go:334] "Generic (PLEG): container finished" podID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerID="b72770e6ca2af04dd6f8cd533270d8f5cb2efa68bd07b65df272cac0b837268a" exitCode=0
Jan 31 06:28:38 crc kubenswrapper[4712]: I0131 06:28:38.876779 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lmhzg" event={"ID":"44d8a918-0165-4bec-9f59-bf1e7de47746","Type":"ContainerDied","Data":"b72770e6ca2af04dd6f8cd533270d8f5cb2efa68bd07b65df272cac0b837268a"}
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.003666 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.085005 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-catalog-content\") pod \"44d8a918-0165-4bec-9f59-bf1e7de47746\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") "
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.085077 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-utilities\") pod \"44d8a918-0165-4bec-9f59-bf1e7de47746\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") "
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.085154 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwjpc\" (UniqueName: \"kubernetes.io/projected/44d8a918-0165-4bec-9f59-bf1e7de47746-kube-api-access-fwjpc\") pod \"44d8a918-0165-4bec-9f59-bf1e7de47746\" (UID: \"44d8a918-0165-4bec-9f59-bf1e7de47746\") "
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.086635 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-utilities" (OuterVolumeSpecName: "utilities") pod "44d8a918-0165-4bec-9f59-bf1e7de47746" (UID: "44d8a918-0165-4bec-9f59-bf1e7de47746"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.093486 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44d8a918-0165-4bec-9f59-bf1e7de47746-kube-api-access-fwjpc" (OuterVolumeSpecName: "kube-api-access-fwjpc") pod "44d8a918-0165-4bec-9f59-bf1e7de47746" (UID: "44d8a918-0165-4bec-9f59-bf1e7de47746"). InnerVolumeSpecName "kube-api-access-fwjpc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.138656 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "44d8a918-0165-4bec-9f59-bf1e7de47746" (UID: "44d8a918-0165-4bec-9f59-bf1e7de47746"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.187867 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwjpc\" (UniqueName: \"kubernetes.io/projected/44d8a918-0165-4bec-9f59-bf1e7de47746-kube-api-access-fwjpc\") on node \"crc\" DevicePath \"\""
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.187912 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.187937 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44d8a918-0165-4bec-9f59-bf1e7de47746-utilities\") on node \"crc\" DevicePath \"\""
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.891919 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lmhzg" event={"ID":"44d8a918-0165-4bec-9f59-bf1e7de47746","Type":"ContainerDied","Data":"6b0307c2eb5e6a11d9dda460d89957976a6b475c3630b5a24bd934e45478ad75"}
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.892293 4712 scope.go:117] "RemoveContainer" containerID="b72770e6ca2af04dd6f8cd533270d8f5cb2efa68bd07b65df272cac0b837268a"
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.892072 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lmhzg"
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.928483 4712 scope.go:117] "RemoveContainer" containerID="f37efa13fbac7ffbe446870019161e5bf5c5722f929b49d5aead82423277b6c5"
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.933520 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lmhzg"]
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.953446 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lmhzg"]
Jan 31 06:28:39 crc kubenswrapper[4712]: I0131 06:28:39.957389 4712 scope.go:117] "RemoveContainer" containerID="93a6e1ec9aee077e34bf03a6b81c2f101b46d76d195f172c039bc314af3ee5ce"
Jan 31 06:28:40 crc kubenswrapper[4712]: I0131 06:28:40.513523 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44d8a918-0165-4bec-9f59-bf1e7de47746" path="/var/lib/kubelet/pods/44d8a918-0165-4bec-9f59-bf1e7de47746/volumes"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.319841 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Jan 31 06:28:51 crc kubenswrapper[4712]: E0131 06:28:51.320972 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerName="registry-server"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.320994 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerName="registry-server"
Jan 31 06:28:51 crc kubenswrapper[4712]: E0131 06:28:51.321018 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerName="registry-server"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.321028 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerName="registry-server"
Jan 31 06:28:51 crc kubenswrapper[4712]: E0131 06:28:51.321059 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerName="extract-utilities"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.321067 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerName="extract-utilities"
Jan 31 06:28:51 crc kubenswrapper[4712]: E0131 06:28:51.321078 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerName="extract-content"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.321085 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerName="extract-content"
Jan 31 06:28:51 crc kubenswrapper[4712]: E0131 06:28:51.321114 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerName="extract-content"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.321122 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerName="extract-content"
Jan 31 06:28:51 crc kubenswrapper[4712]: E0131 06:28:51.321139 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerName="extract-utilities"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.321146 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerName="extract-utilities"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.321396 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="3273d4a9-226e-4f82-ad32-55b1e724f202" containerName="registry-server"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.321424 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="44d8a918-0165-4bec-9f59-bf1e7de47746" containerName="registry-server"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.322221 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.325745 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.325835 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.325754 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-fq6c7"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.327748 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.330706 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.362293 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.362422 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.362456 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.362490 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.362531 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.362563 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-config-data\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.362600 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.362728 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn96k\" (UniqueName: \"kubernetes.io/projected/0f27224a-407b-4803-afd6-9c1caa3fbfdf-kube-api-access-mn96k\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.362780 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465012 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465064 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465093 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465131 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465160 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-config-data\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465204 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465248 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn96k\" (UniqueName: \"kubernetes.io/projected/0f27224a-407b-4803-afd6-9c1caa3fbfdf-kube-api-access-mn96k\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465288 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465324 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465508 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465753 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.465784 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.466460 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.466720 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-config-data\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.471235 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.472210 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.476058 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.484365 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn96k\" (UniqueName: \"kubernetes.io/projected/0f27224a-407b-4803-afd6-9c1caa3fbfdf-kube-api-access-mn96k\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.497877 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " pod="openstack/tempest-tests-tempest"
Jan 31 06:28:51 crc kubenswrapper[4712]: I0131 06:28:51.657793 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Jan 31 06:28:52 crc kubenswrapper[4712]: I0131 06:28:52.146374 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Jan 31 06:28:53 crc kubenswrapper[4712]: I0131 06:28:53.028328 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"0f27224a-407b-4803-afd6-9c1caa3fbfdf","Type":"ContainerStarted","Data":"356e0d8705b3eb370d799af9325b97695445745d60c408e35a7a7dab5903d0e0"}
Jan 31 06:29:21 crc kubenswrapper[4712]: E0131 06:29:21.629138 4712 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-tempest-all:d790bc5e0de33b4fa3f6e15acfa448e0"
Jan 31 06:29:21 crc kubenswrapper[4712]: E0131 06:29:21.629768 4712 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos9/openstack-tempest-all:d790bc5e0de33b4fa3f6e15acfa448e0"
Jan 31 06:29:21 crc kubenswrapper[4712]: E0131 06:29:21.630050 4712 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.rdoproject.org/podified-master-centos9/openstack-tempest-all:d790bc5e0de33b4fa3f6e15acfa448e0,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mn96k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(0f27224a-407b-4803-afd6-9c1caa3fbfdf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 31 06:29:21 crc kubenswrapper[4712]: E0131 06:29:21.631351 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="0f27224a-407b-4803-afd6-9c1caa3fbfdf"
podUID="0f27224a-407b-4803-afd6-9c1caa3fbfdf" Jan 31 06:29:22 crc kubenswrapper[4712]: E0131 06:29:22.309987 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos9/openstack-tempest-all:d790bc5e0de33b4fa3f6e15acfa448e0\\\"\"" pod="openstack/tempest-tests-tempest" podUID="0f27224a-407b-4803-afd6-9c1caa3fbfdf" Jan 31 06:29:38 crc kubenswrapper[4712]: I0131 06:29:38.841520 4712 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" podUID="623f0661-5fd0-4c1c-94b8-7cb41dc60f5f" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.72:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 31 06:29:38 crc kubenswrapper[4712]: I0131 06:29:38.841764 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-ktrfv" podUID="623f0661-5fd0-4c1c-94b8-7cb41dc60f5f" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.72:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 31 06:29:39 crc kubenswrapper[4712]: I0131 06:29:39.413760 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Jan 31 06:29:40 crc kubenswrapper[4712]: I0131 06:29:40.274228 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"0f27224a-407b-4803-afd6-9c1caa3fbfdf","Type":"ContainerStarted","Data":"48f486d920885812aafd31457a07b646cc2d1dc654e252caa00a0fcc45a369d5"} Jan 31 06:29:40 crc kubenswrapper[4712]: I0131 06:29:40.304577 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.046927179 podStartE2EDuration="50.304550565s" podCreationTimestamp="2026-01-31 06:28:50 +0000 UTC" firstStartedPulling="2026-01-31 06:28:52.153352803 +0000 UTC m=+2998.247234644" lastFinishedPulling="2026-01-31 06:29:39.410976179 +0000 UTC m=+3045.504858030" observedRunningTime="2026-01-31 06:29:40.297884933 +0000 UTC m=+3046.391766814" watchObservedRunningTime="2026-01-31 06:29:40.304550565 +0000 UTC m=+3046.398432406" Jan 31 06:29:42 crc kubenswrapper[4712]: I0131 06:29:42.497523 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:29:42 crc kubenswrapper[4712]: I0131 06:29:42.497906 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.153463 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494"] Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.161466 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.163948 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494"] Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.166775 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.168409 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.338240 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxlpw\" (UniqueName: \"kubernetes.io/projected/b9d647d2-ec70-45cd-87d6-2451fd81d1db-kube-api-access-vxlpw\") pod \"collect-profiles-29497350-nn494\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.338660 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d647d2-ec70-45cd-87d6-2451fd81d1db-config-volume\") pod \"collect-profiles-29497350-nn494\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.338996 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d647d2-ec70-45cd-87d6-2451fd81d1db-secret-volume\") pod \"collect-profiles-29497350-nn494\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.440894 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d647d2-ec70-45cd-87d6-2451fd81d1db-secret-volume\") pod \"collect-profiles-29497350-nn494\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.441055 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxlpw\" (UniqueName: \"kubernetes.io/projected/b9d647d2-ec70-45cd-87d6-2451fd81d1db-kube-api-access-vxlpw\") pod \"collect-profiles-29497350-nn494\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.441135 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d647d2-ec70-45cd-87d6-2451fd81d1db-config-volume\") pod \"collect-profiles-29497350-nn494\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.442467 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d647d2-ec70-45cd-87d6-2451fd81d1db-config-volume\") pod 
\"collect-profiles-29497350-nn494\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.460339 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d647d2-ec70-45cd-87d6-2451fd81d1db-secret-volume\") pod \"collect-profiles-29497350-nn494\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.465883 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxlpw\" (UniqueName: \"kubernetes.io/projected/b9d647d2-ec70-45cd-87d6-2451fd81d1db-kube-api-access-vxlpw\") pod \"collect-profiles-29497350-nn494\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.488017 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:00 crc kubenswrapper[4712]: I0131 06:30:00.970935 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494"] Jan 31 06:30:00 crc kubenswrapper[4712]: W0131 06:30:00.977635 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9d647d2_ec70_45cd_87d6_2451fd81d1db.slice/crio-a01fe6a53250ce7c2ed58e92e8ecc74d83b8db732bdc8e03caecd7481be19bb4 WatchSource:0}: Error finding container a01fe6a53250ce7c2ed58e92e8ecc74d83b8db732bdc8e03caecd7481be19bb4: Status 404 returned error can't find the container with id a01fe6a53250ce7c2ed58e92e8ecc74d83b8db732bdc8e03caecd7481be19bb4 Jan 31 06:30:01 crc kubenswrapper[4712]: I0131 06:30:01.473783 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" event={"ID":"b9d647d2-ec70-45cd-87d6-2451fd81d1db","Type":"ContainerStarted","Data":"6181cb8367ebd2e139f1d69a38463640164c642b9823b55b4a333dc7a7ee69a1"} Jan 31 06:30:01 crc kubenswrapper[4712]: I0131 06:30:01.475324 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" event={"ID":"b9d647d2-ec70-45cd-87d6-2451fd81d1db","Type":"ContainerStarted","Data":"a01fe6a53250ce7c2ed58e92e8ecc74d83b8db732bdc8e03caecd7481be19bb4"} Jan 31 06:30:01 crc kubenswrapper[4712]: I0131 06:30:01.499292 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" podStartSLOduration=1.499270006 podStartE2EDuration="1.499270006s" podCreationTimestamp="2026-01-31 06:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 06:30:01.489441888 +0000 UTC m=+3067.583323749" watchObservedRunningTime="2026-01-31 06:30:01.499270006 +0000 UTC m=+3067.593151847" Jan 31 06:30:02 crc kubenswrapper[4712]: I0131 06:30:02.487451 4712 generic.go:334] "Generic (PLEG): container finished" podID="b9d647d2-ec70-45cd-87d6-2451fd81d1db" containerID="6181cb8367ebd2e139f1d69a38463640164c642b9823b55b4a333dc7a7ee69a1" exitCode=0 Jan 31 06:30:02 crc kubenswrapper[4712]: I0131 06:30:02.487745 
4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" event={"ID":"b9d647d2-ec70-45cd-87d6-2451fd81d1db","Type":"ContainerDied","Data":"6181cb8367ebd2e139f1d69a38463640164c642b9823b55b4a333dc7a7ee69a1"} Jan 31 06:30:03 crc kubenswrapper[4712]: I0131 06:30:03.883280 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.023400 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d647d2-ec70-45cd-87d6-2451fd81d1db-secret-volume\") pod \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.023574 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d647d2-ec70-45cd-87d6-2451fd81d1db-config-volume\") pod \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.023774 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxlpw\" (UniqueName: \"kubernetes.io/projected/b9d647d2-ec70-45cd-87d6-2451fd81d1db-kube-api-access-vxlpw\") pod \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\" (UID: \"b9d647d2-ec70-45cd-87d6-2451fd81d1db\") " Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.024455 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9d647d2-ec70-45cd-87d6-2451fd81d1db-config-volume" (OuterVolumeSpecName: "config-volume") pod "b9d647d2-ec70-45cd-87d6-2451fd81d1db" (UID: "b9d647d2-ec70-45cd-87d6-2451fd81d1db"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.024715 4712 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d647d2-ec70-45cd-87d6-2451fd81d1db-config-volume\") on node \"crc\" DevicePath \"\"" Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.029629 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d647d2-ec70-45cd-87d6-2451fd81d1db-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b9d647d2-ec70-45cd-87d6-2451fd81d1db" (UID: "b9d647d2-ec70-45cd-87d6-2451fd81d1db"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.029726 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9d647d2-ec70-45cd-87d6-2451fd81d1db-kube-api-access-vxlpw" (OuterVolumeSpecName: "kube-api-access-vxlpw") pod "b9d647d2-ec70-45cd-87d6-2451fd81d1db" (UID: "b9d647d2-ec70-45cd-87d6-2451fd81d1db"). InnerVolumeSpecName "kube-api-access-vxlpw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.128053 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxlpw\" (UniqueName: \"kubernetes.io/projected/b9d647d2-ec70-45cd-87d6-2451fd81d1db-kube-api-access-vxlpw\") on node \"crc\" DevicePath \"\"" Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.128113 4712 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d647d2-ec70-45cd-87d6-2451fd81d1db-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.514149 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.519400 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497350-nn494" event={"ID":"b9d647d2-ec70-45cd-87d6-2451fd81d1db","Type":"ContainerDied","Data":"a01fe6a53250ce7c2ed58e92e8ecc74d83b8db732bdc8e03caecd7481be19bb4"} Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.519708 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a01fe6a53250ce7c2ed58e92e8ecc74d83b8db732bdc8e03caecd7481be19bb4" Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.576728 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh"] Jan 31 06:30:04 crc kubenswrapper[4712]: I0131 06:30:04.584964 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497305-pfvsh"] Jan 31 06:30:06 crc kubenswrapper[4712]: I0131 06:30:06.517400 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b31acf79-3f94-4e6f-ae2e-3f064f8cc35e" path="/var/lib/kubelet/pods/b31acf79-3f94-4e6f-ae2e-3f064f8cc35e/volumes" Jan 31 06:30:12 crc kubenswrapper[4712]: I0131 06:30:12.496936 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:30:12 crc kubenswrapper[4712]: I0131 06:30:12.498119 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:30:21 crc kubenswrapper[4712]: I0131 06:30:21.260188 4712 scope.go:117] "RemoveContainer" containerID="d449ac7e9b8973adfaaddf1833b128f805501c3cf9e3e9a2b6536eacf8bc5656" Jan 31 06:30:42 crc kubenswrapper[4712]: I0131 06:30:42.497753 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:30:42 crc kubenswrapper[4712]: I0131 06:30:42.498359 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:30:42 crc kubenswrapper[4712]: I0131 06:30:42.498415 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 06:30:42 crc kubenswrapper[4712]: I0131 06:30:42.499379 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 06:30:42 crc kubenswrapper[4712]: I0131 06:30:42.499439 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" gracePeriod=600 Jan 31 06:30:42 crc kubenswrapper[4712]: E0131 06:30:42.716550 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:30:42 crc kubenswrapper[4712]: I0131 06:30:42.922479 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" exitCode=0 Jan 31 06:30:42 crc kubenswrapper[4712]: I0131 06:30:42.922524 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee"} Jan 31 06:30:42 crc kubenswrapper[4712]: I0131 06:30:42.922601 4712 scope.go:117] "RemoveContainer" containerID="bd0db9f1b9188f04f9a07e3bf278e214acd4d01c9434c6c269ba968f016c0683" Jan 31 06:30:42 crc kubenswrapper[4712]: I0131 06:30:42.923866 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:30:42 crc kubenswrapper[4712]: E0131 06:30:42.924236 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:30:57 crc kubenswrapper[4712]: I0131 06:30:57.504530 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:30:57 crc kubenswrapper[4712]: E0131 06:30:57.505373 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:31:09 crc kubenswrapper[4712]: I0131 06:31:09.504724 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:31:09 crc kubenswrapper[4712]: E0131 06:31:09.505672 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:31:20 crc kubenswrapper[4712]: I0131 06:31:20.504573 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:31:20 crc kubenswrapper[4712]: E0131 06:31:20.505881 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:31:35 crc kubenswrapper[4712]: I0131 06:31:35.504840 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:31:35 crc kubenswrapper[4712]: E0131 06:31:35.505728 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:31:47 crc kubenswrapper[4712]: I0131 06:31:47.504275 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:31:47 crc kubenswrapper[4712]: E0131 06:31:47.505436 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:32:02 crc kubenswrapper[4712]: I0131 06:32:02.504530 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:32:02 crc kubenswrapper[4712]: E0131 06:32:02.505271 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:32:14 crc kubenswrapper[4712]: I0131 06:32:14.514138 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:32:14 crc kubenswrapper[4712]: E0131 06:32:14.515940 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:32:29 crc kubenswrapper[4712]: I0131 06:32:29.505733 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:32:29 crc kubenswrapper[4712]: E0131 06:32:29.506636 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:32:41 crc kubenswrapper[4712]: I0131 06:32:41.505142 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:32:41 crc kubenswrapper[4712]: E0131 06:32:41.506124 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:32:52 crc kubenswrapper[4712]: I0131 06:32:52.504875 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:32:52 crc kubenswrapper[4712]: E0131 06:32:52.505835 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:33:07 crc kubenswrapper[4712]: I0131 06:33:07.505241 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:33:07 crc kubenswrapper[4712]: E0131 06:33:07.506140 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:33:21 crc kubenswrapper[4712]: I0131 06:33:21.503997 4712 
scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:33:21 crc kubenswrapper[4712]: E0131 06:33:21.504905 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:33:34 crc kubenswrapper[4712]: I0131 06:33:34.515045 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:33:34 crc kubenswrapper[4712]: E0131 06:33:34.516032 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:33:49 crc kubenswrapper[4712]: I0131 06:33:49.505846 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:33:49 crc kubenswrapper[4712]: E0131 06:33:49.506762 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:34:03 crc kubenswrapper[4712]: I0131 06:34:03.504314 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:34:03 crc kubenswrapper[4712]: E0131 06:34:03.505086 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:34:15 crc kubenswrapper[4712]: I0131 06:34:15.504569 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:34:15 crc kubenswrapper[4712]: E0131 06:34:15.505501 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.787482 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gtgbr"] Jan 31 06:34:16 crc kubenswrapper[4712]: E0131 06:34:16.788371 4712 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="b9d647d2-ec70-45cd-87d6-2451fd81d1db" containerName="collect-profiles" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.788387 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9d647d2-ec70-45cd-87d6-2451fd81d1db" containerName="collect-profiles" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.788637 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9d647d2-ec70-45cd-87d6-2451fd81d1db" containerName="collect-profiles" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.790107 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.807946 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gtgbr"] Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.823832 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-utilities\") pod \"redhat-operators-gtgbr\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.823973 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-catalog-content\") pod \"redhat-operators-gtgbr\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.824088 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d57l\" (UniqueName: \"kubernetes.io/projected/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-kube-api-access-4d57l\") pod \"redhat-operators-gtgbr\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.926299 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-utilities\") pod \"redhat-operators-gtgbr\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.926456 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-catalog-content\") pod \"redhat-operators-gtgbr\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.926527 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d57l\" (UniqueName: \"kubernetes.io/projected/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-kube-api-access-4d57l\") pod \"redhat-operators-gtgbr\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.926895 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-utilities\") pod \"redhat-operators-gtgbr\" (UID: 
\"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.927152 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-catalog-content\") pod \"redhat-operators-gtgbr\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:16 crc kubenswrapper[4712]: I0131 06:34:16.947287 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d57l\" (UniqueName: \"kubernetes.io/projected/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-kube-api-access-4d57l\") pod \"redhat-operators-gtgbr\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:17 crc kubenswrapper[4712]: I0131 06:34:17.122271 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:17 crc kubenswrapper[4712]: I0131 06:34:17.658093 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gtgbr"] Jan 31 06:34:17 crc kubenswrapper[4712]: I0131 06:34:17.850764 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtgbr" event={"ID":"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1","Type":"ContainerStarted","Data":"e6bd92442dcddb50663057aac45b4025657f4c8ad2068dcf1d6dc2041050781c"} Jan 31 06:34:18 crc kubenswrapper[4712]: I0131 06:34:18.861123 4712 generic.go:334] "Generic (PLEG): container finished" podID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerID="6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637" exitCode=0 Jan 31 06:34:18 crc kubenswrapper[4712]: I0131 06:34:18.861293 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtgbr" event={"ID":"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1","Type":"ContainerDied","Data":"6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637"} Jan 31 06:34:18 crc kubenswrapper[4712]: I0131 06:34:18.863598 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 06:34:19 crc kubenswrapper[4712]: I0131 06:34:19.883530 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtgbr" event={"ID":"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1","Type":"ContainerStarted","Data":"0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3"} Jan 31 06:34:20 crc kubenswrapper[4712]: I0131 06:34:20.893448 4712 generic.go:334] "Generic (PLEG): container finished" podID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerID="0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3" exitCode=0 Jan 31 06:34:20 crc kubenswrapper[4712]: I0131 06:34:20.893761 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtgbr" event={"ID":"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1","Type":"ContainerDied","Data":"0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3"} Jan 31 06:34:23 crc kubenswrapper[4712]: I0131 06:34:23.921601 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtgbr" event={"ID":"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1","Type":"ContainerStarted","Data":"758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c"} Jan 31 06:34:23 crc kubenswrapper[4712]: I0131 
06:34:23.947635 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gtgbr" podStartSLOduration=3.51457396 podStartE2EDuration="7.947613426s" podCreationTimestamp="2026-01-31 06:34:16 +0000 UTC" firstStartedPulling="2026-01-31 06:34:18.863318037 +0000 UTC m=+3324.957199878" lastFinishedPulling="2026-01-31 06:34:23.296357503 +0000 UTC m=+3329.390239344" observedRunningTime="2026-01-31 06:34:23.940699218 +0000 UTC m=+3330.034581059" watchObservedRunningTime="2026-01-31 06:34:23.947613426 +0000 UTC m=+3330.041495267" Jan 31 06:34:26 crc kubenswrapper[4712]: I0131 06:34:26.505103 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:34:26 crc kubenswrapper[4712]: E0131 06:34:26.506475 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:34:27 crc kubenswrapper[4712]: I0131 06:34:27.123039 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:27 crc kubenswrapper[4712]: I0131 06:34:27.123100 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:28 crc kubenswrapper[4712]: I0131 06:34:28.173075 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gtgbr" podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerName="registry-server" probeResult="failure" output=< Jan 31 06:34:28 crc kubenswrapper[4712]: timeout: failed to connect service ":50051" within 1s Jan 31 06:34:28 crc kubenswrapper[4712]: > Jan 31 06:34:37 crc kubenswrapper[4712]: I0131 06:34:37.172715 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:37 crc kubenswrapper[4712]: I0131 06:34:37.230950 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:37 crc kubenswrapper[4712]: I0131 06:34:37.409114 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gtgbr"] Jan 31 06:34:37 crc kubenswrapper[4712]: I0131 06:34:37.504847 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:34:37 crc kubenswrapper[4712]: E0131 06:34:37.505250 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.141857 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gtgbr" podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerName="registry-server" 
containerID="cri-o://758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c" gracePeriod=2 Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.685793 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.823325 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d57l\" (UniqueName: \"kubernetes.io/projected/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-kube-api-access-4d57l\") pod \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.823415 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-utilities\") pod \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.823456 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-catalog-content\") pod \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\" (UID: \"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1\") " Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.824485 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-utilities" (OuterVolumeSpecName: "utilities") pod "8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" (UID: "8040fbe1-87d8-4fa9-8990-cb3bbf572ac1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.829689 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-kube-api-access-4d57l" (OuterVolumeSpecName: "kube-api-access-4d57l") pod "8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" (UID: "8040fbe1-87d8-4fa9-8990-cb3bbf572ac1"). InnerVolumeSpecName "kube-api-access-4d57l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.926795 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d57l\" (UniqueName: \"kubernetes.io/projected/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-kube-api-access-4d57l\") on node \"crc\" DevicePath \"\"" Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.926829 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:34:39 crc kubenswrapper[4712]: I0131 06:34:39.973334 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" (UID: "8040fbe1-87d8-4fa9-8990-cb3bbf572ac1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.028814 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.155074 4712 generic.go:334] "Generic (PLEG): container finished" podID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerID="758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c" exitCode=0 Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.155138 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtgbr" event={"ID":"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1","Type":"ContainerDied","Data":"758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c"} Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.155187 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gtgbr" event={"ID":"8040fbe1-87d8-4fa9-8990-cb3bbf572ac1","Type":"ContainerDied","Data":"e6bd92442dcddb50663057aac45b4025657f4c8ad2068dcf1d6dc2041050781c"} Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.155212 4712 scope.go:117] "RemoveContainer" containerID="758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.155369 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gtgbr" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.179018 4712 scope.go:117] "RemoveContainer" containerID="0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.194503 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gtgbr"] Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.206585 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gtgbr"] Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.228591 4712 scope.go:117] "RemoveContainer" containerID="6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.279718 4712 scope.go:117] "RemoveContainer" containerID="758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c" Jan 31 06:34:40 crc kubenswrapper[4712]: E0131 06:34:40.281294 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c\": container with ID starting with 758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c not found: ID does not exist" containerID="758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.281351 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c"} err="failed to get container status \"758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c\": rpc error: code = NotFound desc = could not find container \"758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c\": container with ID starting with 758da992f00473284ad94f33abfdb673414f736a8ace9722f84e75457b04101c not found: ID does not exist" Jan 31 06:34:40 crc 
kubenswrapper[4712]: I0131 06:34:40.281387 4712 scope.go:117] "RemoveContainer" containerID="0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3" Jan 31 06:34:40 crc kubenswrapper[4712]: E0131 06:34:40.282279 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3\": container with ID starting with 0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3 not found: ID does not exist" containerID="0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.282317 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3"} err="failed to get container status \"0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3\": rpc error: code = NotFound desc = could not find container \"0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3\": container with ID starting with 0b3ca2897db8ab131a9c2f2432329e3ccd61bca875b14a1fcd743a159f0348f3 not found: ID does not exist" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.282336 4712 scope.go:117] "RemoveContainer" containerID="6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637" Jan 31 06:34:40 crc kubenswrapper[4712]: E0131 06:34:40.282902 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637\": container with ID starting with 6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637 not found: ID does not exist" containerID="6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.282963 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637"} err="failed to get container status \"6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637\": rpc error: code = NotFound desc = could not find container \"6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637\": container with ID starting with 6d8555086affbb4f8770ce0ddbed6414c7a0af25ade5d2087f2ce477be479637 not found: ID does not exist" Jan 31 06:34:40 crc kubenswrapper[4712]: I0131 06:34:40.524243 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" path="/var/lib/kubelet/pods/8040fbe1-87d8-4fa9-8990-cb3bbf572ac1/volumes" Jan 31 06:34:49 crc kubenswrapper[4712]: I0131 06:34:49.504071 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:34:49 crc kubenswrapper[4712]: E0131 06:34:49.504858 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:35:02 crc kubenswrapper[4712]: I0131 06:35:02.505321 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" 
Jan 31 06:35:02 crc kubenswrapper[4712]: E0131 06:35:02.506353 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:35:16 crc kubenswrapper[4712]: I0131 06:35:16.504931 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:35:16 crc kubenswrapper[4712]: E0131 06:35:16.505829 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:35:29 crc kubenswrapper[4712]: I0131 06:35:29.505008 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:35:29 crc kubenswrapper[4712]: E0131 06:35:29.505921 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:35:41 crc kubenswrapper[4712]: I0131 06:35:41.504092 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:35:41 crc kubenswrapper[4712]: E0131 06:35:41.504917 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:35:52 crc kubenswrapper[4712]: I0131 06:35:52.504826 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:35:52 crc kubenswrapper[4712]: I0131 06:35:52.848034 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"03371805fdbfbbba5ea2e6c838231a1cc306737958865e8175d9d830d95cc4c8"} Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.315879 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9d6b9"] Jan 31 06:36:19 crc kubenswrapper[4712]: E0131 06:36:19.323471 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerName="extract-utilities" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.323613 4712 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerName="extract-utilities" Jan 31 06:36:19 crc kubenswrapper[4712]: E0131 06:36:19.323732 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerName="extract-content" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.323829 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerName="extract-content" Jan 31 06:36:19 crc kubenswrapper[4712]: E0131 06:36:19.323932 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerName="registry-server" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.324030 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerName="registry-server" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.324453 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="8040fbe1-87d8-4fa9-8990-cb3bbf572ac1" containerName="registry-server" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.326923 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.327526 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9d6b9"] Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.461214 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4l5q\" (UniqueName: \"kubernetes.io/projected/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-kube-api-access-q4l5q\") pod \"certified-operators-9d6b9\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.462154 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-catalog-content\") pod \"certified-operators-9d6b9\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.462315 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-utilities\") pod \"certified-operators-9d6b9\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.564259 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4l5q\" (UniqueName: \"kubernetes.io/projected/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-kube-api-access-q4l5q\") pod \"certified-operators-9d6b9\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.564350 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-catalog-content\") pod \"certified-operators-9d6b9\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 
06:36:19.564386 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-utilities\") pod \"certified-operators-9d6b9\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.564872 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-utilities\") pod \"certified-operators-9d6b9\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.565115 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-catalog-content\") pod \"certified-operators-9d6b9\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.602953 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4l5q\" (UniqueName: \"kubernetes.io/projected/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-kube-api-access-q4l5q\") pod \"certified-operators-9d6b9\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:19 crc kubenswrapper[4712]: I0131 06:36:19.655769 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:20 crc kubenswrapper[4712]: I0131 06:36:20.214292 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9d6b9"] Jan 31 06:36:21 crc kubenswrapper[4712]: I0131 06:36:21.130158 4712 generic.go:334] "Generic (PLEG): container finished" podID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerID="583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e" exitCode=0 Jan 31 06:36:21 crc kubenswrapper[4712]: I0131 06:36:21.130443 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9d6b9" event={"ID":"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3","Type":"ContainerDied","Data":"583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e"} Jan 31 06:36:21 crc kubenswrapper[4712]: I0131 06:36:21.130552 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9d6b9" event={"ID":"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3","Type":"ContainerStarted","Data":"c60e2f2b7b8a355768482bab906b7cbc351e3770dffed3daa19fabeb7ae22e8c"} Jan 31 06:36:22 crc kubenswrapper[4712]: I0131 06:36:22.142024 4712 generic.go:334] "Generic (PLEG): container finished" podID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerID="75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8" exitCode=0 Jan 31 06:36:22 crc kubenswrapper[4712]: I0131 06:36:22.142328 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9d6b9" event={"ID":"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3","Type":"ContainerDied","Data":"75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8"} Jan 31 06:36:23 crc kubenswrapper[4712]: I0131 06:36:23.156793 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9d6b9" 
event={"ID":"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3","Type":"ContainerStarted","Data":"189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f"} Jan 31 06:36:23 crc kubenswrapper[4712]: I0131 06:36:23.179873 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9d6b9" podStartSLOduration=2.777340067 podStartE2EDuration="4.179845438s" podCreationTimestamp="2026-01-31 06:36:19 +0000 UTC" firstStartedPulling="2026-01-31 06:36:21.135779354 +0000 UTC m=+3447.229661195" lastFinishedPulling="2026-01-31 06:36:22.538284705 +0000 UTC m=+3448.632166566" observedRunningTime="2026-01-31 06:36:23.174706013 +0000 UTC m=+3449.268587874" watchObservedRunningTime="2026-01-31 06:36:23.179845438 +0000 UTC m=+3449.273727279" Jan 31 06:36:29 crc kubenswrapper[4712]: I0131 06:36:29.656518 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:29 crc kubenswrapper[4712]: I0131 06:36:29.657378 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:29 crc kubenswrapper[4712]: I0131 06:36:29.732679 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:30 crc kubenswrapper[4712]: I0131 06:36:30.295797 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:30 crc kubenswrapper[4712]: I0131 06:36:30.347905 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9d6b9"] Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.248790 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9d6b9" podUID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerName="registry-server" containerID="cri-o://189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f" gracePeriod=2 Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.690110 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.858635 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-utilities\") pod \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.858890 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4l5q\" (UniqueName: \"kubernetes.io/projected/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-kube-api-access-q4l5q\") pod \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.858983 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-catalog-content\") pod \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\" (UID: \"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3\") " Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.859699 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-utilities" (OuterVolumeSpecName: "utilities") pod "1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" (UID: "1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.865204 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-kube-api-access-q4l5q" (OuterVolumeSpecName: "kube-api-access-q4l5q") pod "1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" (UID: "1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3"). InnerVolumeSpecName "kube-api-access-q4l5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.909307 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" (UID: "1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.961663 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.961694 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:36:32 crc kubenswrapper[4712]: I0131 06:36:32.961705 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4l5q\" (UniqueName: \"kubernetes.io/projected/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3-kube-api-access-q4l5q\") on node \"crc\" DevicePath \"\"" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.259306 4712 generic.go:334] "Generic (PLEG): container finished" podID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerID="189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f" exitCode=0 Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.259357 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9d6b9" event={"ID":"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3","Type":"ContainerDied","Data":"189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f"} Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.259671 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9d6b9" event={"ID":"1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3","Type":"ContainerDied","Data":"c60e2f2b7b8a355768482bab906b7cbc351e3770dffed3daa19fabeb7ae22e8c"} Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.259704 4712 scope.go:117] "RemoveContainer" containerID="189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.259371 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9d6b9" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.284421 4712 scope.go:117] "RemoveContainer" containerID="75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.313670 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9d6b9"] Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.324608 4712 scope.go:117] "RemoveContainer" containerID="583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.329265 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9d6b9"] Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.367493 4712 scope.go:117] "RemoveContainer" containerID="189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f" Jan 31 06:36:33 crc kubenswrapper[4712]: E0131 06:36:33.368102 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f\": container with ID starting with 189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f not found: ID does not exist" containerID="189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.368154 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f"} err="failed to get container status \"189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f\": rpc error: code = NotFound desc = could not find container \"189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f\": container with ID starting with 189c03973174d41efeccd62c8bfb14a64464bf012b12a8f86a54fc25b9f81f0f not found: ID does not exist" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.368207 4712 scope.go:117] "RemoveContainer" containerID="75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8" Jan 31 06:36:33 crc kubenswrapper[4712]: E0131 06:36:33.368779 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8\": container with ID starting with 75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8 not found: ID does not exist" containerID="75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.368809 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8"} err="failed to get container status \"75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8\": rpc error: code = NotFound desc = could not find container \"75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8\": container with ID starting with 75c4971edee3fe70e683066b6706c0d1c6bb3725ec0312eae98f79f40806bdc8 not found: ID does not exist" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.368826 4712 scope.go:117] "RemoveContainer" containerID="583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e" Jan 31 06:36:33 crc kubenswrapper[4712]: E0131 06:36:33.369238 4712 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e\": container with ID starting with 583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e not found: ID does not exist" containerID="583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e" Jan 31 06:36:33 crc kubenswrapper[4712]: I0131 06:36:33.369301 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e"} err="failed to get container status \"583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e\": rpc error: code = NotFound desc = could not find container \"583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e\": container with ID starting with 583925f7257e0afe67075d08092ce7c4aba9ea7b51d6c0c2ba9ef1af59dac31e not found: ID does not exist" Jan 31 06:36:34 crc kubenswrapper[4712]: I0131 06:36:34.516671 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" path="/var/lib/kubelet/pods/1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3/volumes" Jan 31 06:38:12 crc kubenswrapper[4712]: I0131 06:38:12.496959 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:38:12 crc kubenswrapper[4712]: I0131 06:38:12.498935 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:38:42 crc kubenswrapper[4712]: I0131 06:38:42.496947 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:38:42 crc kubenswrapper[4712]: I0131 06:38:42.497493 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:39:12 crc kubenswrapper[4712]: I0131 06:39:12.497957 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:39:12 crc kubenswrapper[4712]: I0131 06:39:12.498600 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:39:12 crc kubenswrapper[4712]: I0131 06:39:12.498657 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 06:39:12 crc kubenswrapper[4712]: I0131 06:39:12.499630 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"03371805fdbfbbba5ea2e6c838231a1cc306737958865e8175d9d830d95cc4c8"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 06:39:12 crc kubenswrapper[4712]: I0131 06:39:12.499710 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://03371805fdbfbbba5ea2e6c838231a1cc306737958865e8175d9d830d95cc4c8" gracePeriod=600 Jan 31 06:39:12 crc kubenswrapper[4712]: I0131 06:39:12.838102 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="03371805fdbfbbba5ea2e6c838231a1cc306737958865e8175d9d830d95cc4c8" exitCode=0 Jan 31 06:39:12 crc kubenswrapper[4712]: I0131 06:39:12.838261 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"03371805fdbfbbba5ea2e6c838231a1cc306737958865e8175d9d830d95cc4c8"} Jan 31 06:39:12 crc kubenswrapper[4712]: I0131 06:39:12.838603 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01"} Jan 31 06:39:12 crc kubenswrapper[4712]: I0131 06:39:12.838631 4712 scope.go:117] "RemoveContainer" containerID="27d83831bffa2d59cd4338c3b9211786859c7a612cf3f244a17c060c64f719ee" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.133726 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5x467"] Jan 31 06:39:42 crc kubenswrapper[4712]: E0131 06:39:42.135076 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerName="registry-server" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.135092 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerName="registry-server" Jan 31 06:39:42 crc kubenswrapper[4712]: E0131 06:39:42.135110 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerName="extract-content" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.135117 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerName="extract-content" Jan 31 06:39:42 crc kubenswrapper[4712]: E0131 06:39:42.135135 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerName="extract-utilities" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.135142 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerName="extract-utilities" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.135649 4712 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1e0af94e-ddf7-422c-a6cb-2c7c2fb5d6d3" containerName="registry-server" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.137635 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.194354 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5x467"] Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.227089 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qxdj\" (UniqueName: \"kubernetes.io/projected/207ffbe6-874c-4237-8ccc-aa626d2c04a5-kube-api-access-7qxdj\") pod \"redhat-marketplace-5x467\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.227711 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-catalog-content\") pod \"redhat-marketplace-5x467\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.228050 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-utilities\") pod \"redhat-marketplace-5x467\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.330870 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qxdj\" (UniqueName: \"kubernetes.io/projected/207ffbe6-874c-4237-8ccc-aa626d2c04a5-kube-api-access-7qxdj\") pod \"redhat-marketplace-5x467\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.331376 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-catalog-content\") pod \"redhat-marketplace-5x467\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.331625 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-utilities\") pod \"redhat-marketplace-5x467\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.332047 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-catalog-content\") pod \"redhat-marketplace-5x467\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.332104 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-utilities\") pod \"redhat-marketplace-5x467\" (UID: 
\"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.357888 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qxdj\" (UniqueName: \"kubernetes.io/projected/207ffbe6-874c-4237-8ccc-aa626d2c04a5-kube-api-access-7qxdj\") pod \"redhat-marketplace-5x467\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:42 crc kubenswrapper[4712]: I0131 06:39:42.507184 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:43 crc kubenswrapper[4712]: I0131 06:39:43.016386 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5x467"] Jan 31 06:39:43 crc kubenswrapper[4712]: I0131 06:39:43.135845 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5x467" event={"ID":"207ffbe6-874c-4237-8ccc-aa626d2c04a5","Type":"ContainerStarted","Data":"2b823a5a969e26ac1587a160a74b419becba37ee0e423316303add08c1f91f15"} Jan 31 06:39:44 crc kubenswrapper[4712]: I0131 06:39:44.157224 4712 generic.go:334] "Generic (PLEG): container finished" podID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerID="9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b" exitCode=0 Jan 31 06:39:44 crc kubenswrapper[4712]: I0131 06:39:44.157775 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5x467" event={"ID":"207ffbe6-874c-4237-8ccc-aa626d2c04a5","Type":"ContainerDied","Data":"9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b"} Jan 31 06:39:44 crc kubenswrapper[4712]: I0131 06:39:44.166610 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 06:39:45 crc kubenswrapper[4712]: I0131 06:39:45.170032 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5x467" event={"ID":"207ffbe6-874c-4237-8ccc-aa626d2c04a5","Type":"ContainerStarted","Data":"55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8"} Jan 31 06:39:46 crc kubenswrapper[4712]: I0131 06:39:46.180698 4712 generic.go:334] "Generic (PLEG): container finished" podID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerID="55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8" exitCode=0 Jan 31 06:39:46 crc kubenswrapper[4712]: I0131 06:39:46.180843 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5x467" event={"ID":"207ffbe6-874c-4237-8ccc-aa626d2c04a5","Type":"ContainerDied","Data":"55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8"} Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.195485 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5x467" event={"ID":"207ffbe6-874c-4237-8ccc-aa626d2c04a5","Type":"ContainerStarted","Data":"fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a"} Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.220956 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5x467" podStartSLOduration=2.609737194 podStartE2EDuration="5.220923604s" podCreationTimestamp="2026-01-31 06:39:42 +0000 UTC" firstStartedPulling="2026-01-31 06:39:44.166335103 +0000 UTC m=+3650.260216944" 
lastFinishedPulling="2026-01-31 06:39:46.777521473 +0000 UTC m=+3652.871403354" observedRunningTime="2026-01-31 06:39:47.212652232 +0000 UTC m=+3653.306534073" watchObservedRunningTime="2026-01-31 06:39:47.220923604 +0000 UTC m=+3653.314805445" Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.718807 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mpg56"] Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.721266 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.733390 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mpg56"] Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.861457 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-catalog-content\") pod \"community-operators-mpg56\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.861547 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-utilities\") pod \"community-operators-mpg56\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.861579 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl9wl\" (UniqueName: \"kubernetes.io/projected/8ccc8512-452b-473c-b73d-587066c7e5eb-kube-api-access-bl9wl\") pod \"community-operators-mpg56\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.963388 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-catalog-content\") pod \"community-operators-mpg56\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.963489 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-utilities\") pod \"community-operators-mpg56\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.963533 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl9wl\" (UniqueName: \"kubernetes.io/projected/8ccc8512-452b-473c-b73d-587066c7e5eb-kube-api-access-bl9wl\") pod \"community-operators-mpg56\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.963991 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-catalog-content\") pod \"community-operators-mpg56\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " 
pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:47 crc kubenswrapper[4712]: I0131 06:39:47.964073 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-utilities\") pod \"community-operators-mpg56\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:48 crc kubenswrapper[4712]: I0131 06:39:48.000745 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl9wl\" (UniqueName: \"kubernetes.io/projected/8ccc8512-452b-473c-b73d-587066c7e5eb-kube-api-access-bl9wl\") pod \"community-operators-mpg56\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:48 crc kubenswrapper[4712]: I0131 06:39:48.041646 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:48 crc kubenswrapper[4712]: I0131 06:39:48.598241 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mpg56"] Jan 31 06:39:48 crc kubenswrapper[4712]: W0131 06:39:48.602737 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ccc8512_452b_473c_b73d_587066c7e5eb.slice/crio-f35eddcecd382aa2f91d61725c48d6a6d913463f3c9c18a6d65166b3e50df90b WatchSource:0}: Error finding container f35eddcecd382aa2f91d61725c48d6a6d913463f3c9c18a6d65166b3e50df90b: Status 404 returned error can't find the container with id f35eddcecd382aa2f91d61725c48d6a6d913463f3c9c18a6d65166b3e50df90b Jan 31 06:39:49 crc kubenswrapper[4712]: I0131 06:39:49.274633 4712 generic.go:334] "Generic (PLEG): container finished" podID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerID="a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f" exitCode=0 Jan 31 06:39:49 crc kubenswrapper[4712]: I0131 06:39:49.274708 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpg56" event={"ID":"8ccc8512-452b-473c-b73d-587066c7e5eb","Type":"ContainerDied","Data":"a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f"} Jan 31 06:39:49 crc kubenswrapper[4712]: I0131 06:39:49.274926 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpg56" event={"ID":"8ccc8512-452b-473c-b73d-587066c7e5eb","Type":"ContainerStarted","Data":"f35eddcecd382aa2f91d61725c48d6a6d913463f3c9c18a6d65166b3e50df90b"} Jan 31 06:39:51 crc kubenswrapper[4712]: I0131 06:39:51.299146 4712 generic.go:334] "Generic (PLEG): container finished" podID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerID="bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad" exitCode=0 Jan 31 06:39:51 crc kubenswrapper[4712]: I0131 06:39:51.299226 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpg56" event={"ID":"8ccc8512-452b-473c-b73d-587066c7e5eb","Type":"ContainerDied","Data":"bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad"} Jan 31 06:39:52 crc kubenswrapper[4712]: I0131 06:39:52.312721 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpg56" event={"ID":"8ccc8512-452b-473c-b73d-587066c7e5eb","Type":"ContainerStarted","Data":"f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08"} Jan 31 06:39:52 
crc kubenswrapper[4712]: I0131 06:39:52.340587 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mpg56" podStartSLOduration=2.904638319 podStartE2EDuration="5.340559674s" podCreationTimestamp="2026-01-31 06:39:47 +0000 UTC" firstStartedPulling="2026-01-31 06:39:49.277854687 +0000 UTC m=+3655.371736528" lastFinishedPulling="2026-01-31 06:39:51.713776022 +0000 UTC m=+3657.807657883" observedRunningTime="2026-01-31 06:39:52.331680018 +0000 UTC m=+3658.425561859" watchObservedRunningTime="2026-01-31 06:39:52.340559674 +0000 UTC m=+3658.434441515" Jan 31 06:39:52 crc kubenswrapper[4712]: I0131 06:39:52.516105 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:52 crc kubenswrapper[4712]: I0131 06:39:52.516167 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:52 crc kubenswrapper[4712]: I0131 06:39:52.560666 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:53 crc kubenswrapper[4712]: I0131 06:39:53.378523 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:54 crc kubenswrapper[4712]: I0131 06:39:54.105773 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5x467"] Jan 31 06:39:55 crc kubenswrapper[4712]: I0131 06:39:55.358456 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5x467" podUID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerName="registry-server" containerID="cri-o://fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a" gracePeriod=2 Jan 31 06:39:55 crc kubenswrapper[4712]: I0131 06:39:55.877578 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:55 crc kubenswrapper[4712]: I0131 06:39:55.947913 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qxdj\" (UniqueName: \"kubernetes.io/projected/207ffbe6-874c-4237-8ccc-aa626d2c04a5-kube-api-access-7qxdj\") pod \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " Jan 31 06:39:55 crc kubenswrapper[4712]: I0131 06:39:55.948034 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-utilities\") pod \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " Jan 31 06:39:55 crc kubenswrapper[4712]: I0131 06:39:55.948242 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-catalog-content\") pod \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\" (UID: \"207ffbe6-874c-4237-8ccc-aa626d2c04a5\") " Jan 31 06:39:55 crc kubenswrapper[4712]: I0131 06:39:55.949514 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-utilities" (OuterVolumeSpecName: "utilities") pod "207ffbe6-874c-4237-8ccc-aa626d2c04a5" (UID: "207ffbe6-874c-4237-8ccc-aa626d2c04a5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:39:55 crc kubenswrapper[4712]: I0131 06:39:55.955320 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/207ffbe6-874c-4237-8ccc-aa626d2c04a5-kube-api-access-7qxdj" (OuterVolumeSpecName: "kube-api-access-7qxdj") pod "207ffbe6-874c-4237-8ccc-aa626d2c04a5" (UID: "207ffbe6-874c-4237-8ccc-aa626d2c04a5"). InnerVolumeSpecName "kube-api-access-7qxdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:39:55 crc kubenswrapper[4712]: I0131 06:39:55.973906 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "207ffbe6-874c-4237-8ccc-aa626d2c04a5" (UID: "207ffbe6-874c-4237-8ccc-aa626d2c04a5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.051057 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qxdj\" (UniqueName: \"kubernetes.io/projected/207ffbe6-874c-4237-8ccc-aa626d2c04a5-kube-api-access-7qxdj\") on node \"crc\" DevicePath \"\"" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.051097 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.051122 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207ffbe6-874c-4237-8ccc-aa626d2c04a5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.374983 4712 generic.go:334] "Generic (PLEG): container finished" podID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerID="fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a" exitCode=0 Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.375041 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5x467" event={"ID":"207ffbe6-874c-4237-8ccc-aa626d2c04a5","Type":"ContainerDied","Data":"fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a"} Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.375073 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5x467" event={"ID":"207ffbe6-874c-4237-8ccc-aa626d2c04a5","Type":"ContainerDied","Data":"2b823a5a969e26ac1587a160a74b419becba37ee0e423316303add08c1f91f15"} Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.375093 4712 scope.go:117] "RemoveContainer" containerID="fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.375310 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5x467" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.403527 4712 scope.go:117] "RemoveContainer" containerID="55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.423643 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5x467"] Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.428695 4712 scope.go:117] "RemoveContainer" containerID="9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.432983 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5x467"] Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.479830 4712 scope.go:117] "RemoveContainer" containerID="fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a" Jan 31 06:39:56 crc kubenswrapper[4712]: E0131 06:39:56.480214 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a\": container with ID starting with fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a not found: ID does not exist" containerID="fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.480250 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a"} err="failed to get container status \"fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a\": rpc error: code = NotFound desc = could not find container \"fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a\": container with ID starting with fe9ca01c098feb6399f86b4e1b931080152bca13a5fe8a36db5d1257e720112a not found: ID does not exist" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.480275 4712 scope.go:117] "RemoveContainer" containerID="55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8" Jan 31 06:39:56 crc kubenswrapper[4712]: E0131 06:39:56.480681 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8\": container with ID starting with 55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8 not found: ID does not exist" containerID="55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.480707 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8"} err="failed to get container status \"55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8\": rpc error: code = NotFound desc = could not find container \"55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8\": container with ID starting with 55c3ae164716edfa225e7d5f54b9aaa942473fcd6ded9c6c0c41da97e16c06a8 not found: ID does not exist" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.480724 4712 scope.go:117] "RemoveContainer" containerID="9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b" Jan 31 06:39:56 crc kubenswrapper[4712]: E0131 06:39:56.480963 4712 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b\": container with ID starting with 9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b not found: ID does not exist" containerID="9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.480991 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b"} err="failed to get container status \"9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b\": rpc error: code = NotFound desc = could not find container \"9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b\": container with ID starting with 9e289eede46db563b9a4c83213a9d694d0fe6ddcb1deb4cc3b3ba3424ac3170b not found: ID does not exist" Jan 31 06:39:56 crc kubenswrapper[4712]: I0131 06:39:56.516422 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" path="/var/lib/kubelet/pods/207ffbe6-874c-4237-8ccc-aa626d2c04a5/volumes" Jan 31 06:39:58 crc kubenswrapper[4712]: I0131 06:39:58.042718 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:58 crc kubenswrapper[4712]: I0131 06:39:58.043057 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:58 crc kubenswrapper[4712]: I0131 06:39:58.085075 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:58 crc kubenswrapper[4712]: I0131 06:39:58.441837 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:39:59 crc kubenswrapper[4712]: I0131 06:39:59.507398 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mpg56"] Jan 31 06:40:00 crc kubenswrapper[4712]: I0131 06:40:00.414328 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mpg56" podUID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerName="registry-server" containerID="cri-o://f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08" gracePeriod=2 Jan 31 06:40:00 crc kubenswrapper[4712]: I0131 06:40:00.902931 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:40:00 crc kubenswrapper[4712]: I0131 06:40:00.961930 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-utilities\") pod \"8ccc8512-452b-473c-b73d-587066c7e5eb\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " Jan 31 06:40:00 crc kubenswrapper[4712]: I0131 06:40:00.961982 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-catalog-content\") pod \"8ccc8512-452b-473c-b73d-587066c7e5eb\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " Jan 31 06:40:00 crc kubenswrapper[4712]: I0131 06:40:00.962144 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bl9wl\" (UniqueName: \"kubernetes.io/projected/8ccc8512-452b-473c-b73d-587066c7e5eb-kube-api-access-bl9wl\") pod \"8ccc8512-452b-473c-b73d-587066c7e5eb\" (UID: \"8ccc8512-452b-473c-b73d-587066c7e5eb\") " Jan 31 06:40:00 crc kubenswrapper[4712]: I0131 06:40:00.962872 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-utilities" (OuterVolumeSpecName: "utilities") pod "8ccc8512-452b-473c-b73d-587066c7e5eb" (UID: "8ccc8512-452b-473c-b73d-587066c7e5eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:40:00 crc kubenswrapper[4712]: I0131 06:40:00.963431 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:40:00 crc kubenswrapper[4712]: I0131 06:40:00.976747 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ccc8512-452b-473c-b73d-587066c7e5eb-kube-api-access-bl9wl" (OuterVolumeSpecName: "kube-api-access-bl9wl") pod "8ccc8512-452b-473c-b73d-587066c7e5eb" (UID: "8ccc8512-452b-473c-b73d-587066c7e5eb"). InnerVolumeSpecName "kube-api-access-bl9wl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.026594 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ccc8512-452b-473c-b73d-587066c7e5eb" (UID: "8ccc8512-452b-473c-b73d-587066c7e5eb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.065890 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bl9wl\" (UniqueName: \"kubernetes.io/projected/8ccc8512-452b-473c-b73d-587066c7e5eb-kube-api-access-bl9wl\") on node \"crc\" DevicePath \"\"" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.065930 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ccc8512-452b-473c-b73d-587066c7e5eb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.425762 4712 generic.go:334] "Generic (PLEG): container finished" podID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerID="f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08" exitCode=0 Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.425857 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpg56" event={"ID":"8ccc8512-452b-473c-b73d-587066c7e5eb","Type":"ContainerDied","Data":"f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08"} Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.426111 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mpg56" event={"ID":"8ccc8512-452b-473c-b73d-587066c7e5eb","Type":"ContainerDied","Data":"f35eddcecd382aa2f91d61725c48d6a6d913463f3c9c18a6d65166b3e50df90b"} Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.426139 4712 scope.go:117] "RemoveContainer" containerID="f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.425891 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mpg56" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.460502 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mpg56"] Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.470137 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mpg56"] Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.476470 4712 scope.go:117] "RemoveContainer" containerID="bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.504426 4712 scope.go:117] "RemoveContainer" containerID="a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.544312 4712 scope.go:117] "RemoveContainer" containerID="f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08" Jan 31 06:40:01 crc kubenswrapper[4712]: E0131 06:40:01.544939 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08\": container with ID starting with f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08 not found: ID does not exist" containerID="f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.545000 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08"} err="failed to get container status \"f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08\": rpc error: code = NotFound desc = could not find container \"f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08\": container with ID starting with f1acd51345ed5392a482018e4e7b26baae5ede77610fbe7a2f227fc49b87ad08 not found: ID does not exist" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.545035 4712 scope.go:117] "RemoveContainer" containerID="bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad" Jan 31 06:40:01 crc kubenswrapper[4712]: E0131 06:40:01.545539 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad\": container with ID starting with bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad not found: ID does not exist" containerID="bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.545565 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad"} err="failed to get container status \"bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad\": rpc error: code = NotFound desc = could not find container \"bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad\": container with ID starting with bd65ee49193101cede036c0a0ccb34a937e61313a31dc5f1de723bab8c26e6ad not found: ID does not exist" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.545583 4712 scope.go:117] "RemoveContainer" containerID="a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f" Jan 31 06:40:01 crc kubenswrapper[4712]: E0131 06:40:01.545993 4712 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f\": container with ID starting with a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f not found: ID does not exist" containerID="a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f" Jan 31 06:40:01 crc kubenswrapper[4712]: I0131 06:40:01.546025 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f"} err="failed to get container status \"a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f\": rpc error: code = NotFound desc = could not find container \"a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f\": container with ID starting with a4172fcf6a471056b8edca705a6a8ffe72bcbafb27480ebdc77ea4b6c88e349f not found: ID does not exist" Jan 31 06:40:02 crc kubenswrapper[4712]: I0131 06:40:02.517389 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ccc8512-452b-473c-b73d-587066c7e5eb" path="/var/lib/kubelet/pods/8ccc8512-452b-473c-b73d-587066c7e5eb/volumes" Jan 31 06:41:12 crc kubenswrapper[4712]: I0131 06:41:12.497959 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:41:12 crc kubenswrapper[4712]: I0131 06:41:12.498530 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:41:42 crc kubenswrapper[4712]: I0131 06:41:42.497696 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:41:42 crc kubenswrapper[4712]: I0131 06:41:42.498833 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:42:12 crc kubenswrapper[4712]: I0131 06:42:12.497481 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:42:12 crc kubenswrapper[4712]: I0131 06:42:12.498183 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:42:12 crc kubenswrapper[4712]: I0131 06:42:12.498238 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 06:42:12 crc kubenswrapper[4712]: I0131 06:42:12.499019 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 06:42:12 crc kubenswrapper[4712]: I0131 06:42:12.499071 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" gracePeriod=600 Jan 31 06:42:12 crc kubenswrapper[4712]: E0131 06:42:12.626421 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:42:12 crc kubenswrapper[4712]: I0131 06:42:12.656074 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" exitCode=0 Jan 31 06:42:12 crc kubenswrapper[4712]: I0131 06:42:12.656110 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01"} Jan 31 06:42:12 crc kubenswrapper[4712]: I0131 06:42:12.656161 4712 scope.go:117] "RemoveContainer" containerID="03371805fdbfbbba5ea2e6c838231a1cc306737958865e8175d9d830d95cc4c8" Jan 31 06:42:12 crc kubenswrapper[4712]: I0131 06:42:12.656836 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:42:12 crc kubenswrapper[4712]: E0131 06:42:12.657135 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:42:25 crc kubenswrapper[4712]: I0131 06:42:25.505960 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:42:25 crc kubenswrapper[4712]: E0131 06:42:25.506761 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:42:40 crc 
Jan 31 06:42:40 crc kubenswrapper[4712]: I0131 06:42:40.504287 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01"
Jan 31 06:42:40 crc kubenswrapper[4712]: E0131 06:42:40.505084 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:42:54 crc kubenswrapper[4712]: I0131 06:42:54.512627 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01"
Jan 31 06:42:54 crc kubenswrapper[4712]: E0131 06:42:54.519141 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:43:06 crc kubenswrapper[4712]: I0131 06:43:06.504715 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01"
Jan 31 06:43:06 crc kubenswrapper[4712]: E0131 06:43:06.505599 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:43:19 crc kubenswrapper[4712]: I0131 06:43:19.504673 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01"
Jan 31 06:43:19 crc kubenswrapper[4712]: E0131 06:43:19.505489 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:43:34 crc kubenswrapper[4712]: I0131 06:43:34.679047 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01"
Jan 31 06:43:34 crc kubenswrapper[4712]: E0131 06:43:34.683010 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:43:47 crc kubenswrapper[4712]: I0131 06:43:47.504601 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01"
Jan 31 06:43:47 crc
kubenswrapper[4712]: E0131 06:43:47.505378 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:44:00 crc kubenswrapper[4712]: I0131 06:44:00.504398 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:44:00 crc kubenswrapper[4712]: E0131 06:44:00.505235 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:44:13 crc kubenswrapper[4712]: I0131 06:44:13.504744 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:44:13 crc kubenswrapper[4712]: E0131 06:44:13.505600 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:44:26 crc kubenswrapper[4712]: I0131 06:44:26.505059 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:44:26 crc kubenswrapper[4712]: E0131 06:44:26.505891 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:44:37 crc kubenswrapper[4712]: I0131 06:44:37.505237 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:44:37 crc kubenswrapper[4712]: E0131 06:44:37.506929 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:44:48 crc kubenswrapper[4712]: I0131 06:44:48.505396 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:44:48 crc kubenswrapper[4712]: E0131 06:44:48.506249 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:44:59 crc kubenswrapper[4712]: I0131 06:44:59.505114 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:44:59 crc kubenswrapper[4712]: E0131 06:44:59.505948 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.175091 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2"] Jan 31 06:45:00 crc kubenswrapper[4712]: E0131 06:45:00.175907 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerName="extract-utilities" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.175936 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerName="extract-utilities" Jan 31 06:45:00 crc kubenswrapper[4712]: E0131 06:45:00.175951 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerName="extract-utilities" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.175960 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerName="extract-utilities" Jan 31 06:45:00 crc kubenswrapper[4712]: E0131 06:45:00.175982 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerName="extract-content" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.175991 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerName="extract-content" Jan 31 06:45:00 crc kubenswrapper[4712]: E0131 06:45:00.176002 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerName="registry-server" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.176010 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerName="registry-server" Jan 31 06:45:00 crc kubenswrapper[4712]: E0131 06:45:00.176026 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerName="registry-server" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.176033 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerName="registry-server" Jan 31 06:45:00 crc kubenswrapper[4712]: E0131 06:45:00.176060 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerName="extract-content" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.176069 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerName="extract-content" Jan 31 
06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.176317 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="207ffbe6-874c-4237-8ccc-aa626d2c04a5" containerName="registry-server" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.176343 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ccc8512-452b-473c-b73d-587066c7e5eb" containerName="registry-server" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.177202 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.180919 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.181186 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.189319 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f06982a9-ee52-49e1-bb14-fa58a96f03f3-config-volume\") pod \"collect-profiles-29497365-tpsd2\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.189919 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7pj7\" (UniqueName: \"kubernetes.io/projected/f06982a9-ee52-49e1-bb14-fa58a96f03f3-kube-api-access-s7pj7\") pod \"collect-profiles-29497365-tpsd2\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.190025 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f06982a9-ee52-49e1-bb14-fa58a96f03f3-secret-volume\") pod \"collect-profiles-29497365-tpsd2\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.190899 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2"] Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.293294 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7pj7\" (UniqueName: \"kubernetes.io/projected/f06982a9-ee52-49e1-bb14-fa58a96f03f3-kube-api-access-s7pj7\") pod \"collect-profiles-29497365-tpsd2\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.293372 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f06982a9-ee52-49e1-bb14-fa58a96f03f3-secret-volume\") pod \"collect-profiles-29497365-tpsd2\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.293447 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/f06982a9-ee52-49e1-bb14-fa58a96f03f3-config-volume\") pod \"collect-profiles-29497365-tpsd2\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.294538 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f06982a9-ee52-49e1-bb14-fa58a96f03f3-config-volume\") pod \"collect-profiles-29497365-tpsd2\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.307860 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f06982a9-ee52-49e1-bb14-fa58a96f03f3-secret-volume\") pod \"collect-profiles-29497365-tpsd2\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.311330 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7pj7\" (UniqueName: \"kubernetes.io/projected/f06982a9-ee52-49e1-bb14-fa58a96f03f3-kube-api-access-s7pj7\") pod \"collect-profiles-29497365-tpsd2\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.506780 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:00 crc kubenswrapper[4712]: I0131 06:45:00.987175 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2"] Jan 31 06:45:01 crc kubenswrapper[4712]: I0131 06:45:01.530927 4712 generic.go:334] "Generic (PLEG): container finished" podID="f06982a9-ee52-49e1-bb14-fa58a96f03f3" containerID="e68d5936e2a117740cd2cece53365f5d90abf03b18df1b1b52fbf720b25b7d69" exitCode=0 Jan 31 06:45:01 crc kubenswrapper[4712]: I0131 06:45:01.531271 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" event={"ID":"f06982a9-ee52-49e1-bb14-fa58a96f03f3","Type":"ContainerDied","Data":"e68d5936e2a117740cd2cece53365f5d90abf03b18df1b1b52fbf720b25b7d69"} Jan 31 06:45:01 crc kubenswrapper[4712]: I0131 06:45:01.531312 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" event={"ID":"f06982a9-ee52-49e1-bb14-fa58a96f03f3","Type":"ContainerStarted","Data":"d6542973ff4f3e8cd5f8a1bf8e7d2cfbf3071903d30ec8861d4b284bd9d41665"} Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.154737 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.257195 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7pj7\" (UniqueName: \"kubernetes.io/projected/f06982a9-ee52-49e1-bb14-fa58a96f03f3-kube-api-access-s7pj7\") pod \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.257418 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f06982a9-ee52-49e1-bb14-fa58a96f03f3-secret-volume\") pod \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.257550 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f06982a9-ee52-49e1-bb14-fa58a96f03f3-config-volume\") pod \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\" (UID: \"f06982a9-ee52-49e1-bb14-fa58a96f03f3\") " Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.258422 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f06982a9-ee52-49e1-bb14-fa58a96f03f3-config-volume" (OuterVolumeSpecName: "config-volume") pod "f06982a9-ee52-49e1-bb14-fa58a96f03f3" (UID: "f06982a9-ee52-49e1-bb14-fa58a96f03f3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.273784 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f06982a9-ee52-49e1-bb14-fa58a96f03f3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f06982a9-ee52-49e1-bb14-fa58a96f03f3" (UID: "f06982a9-ee52-49e1-bb14-fa58a96f03f3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.273901 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f06982a9-ee52-49e1-bb14-fa58a96f03f3-kube-api-access-s7pj7" (OuterVolumeSpecName: "kube-api-access-s7pj7") pod "f06982a9-ee52-49e1-bb14-fa58a96f03f3" (UID: "f06982a9-ee52-49e1-bb14-fa58a96f03f3"). InnerVolumeSpecName "kube-api-access-s7pj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.360418 4712 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f06982a9-ee52-49e1-bb14-fa58a96f03f3-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.360482 4712 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f06982a9-ee52-49e1-bb14-fa58a96f03f3-config-volume\") on node \"crc\" DevicePath \"\"" Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.360509 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7pj7\" (UniqueName: \"kubernetes.io/projected/f06982a9-ee52-49e1-bb14-fa58a96f03f3-kube-api-access-s7pj7\") on node \"crc\" DevicePath \"\"" Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.553611 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" event={"ID":"f06982a9-ee52-49e1-bb14-fa58a96f03f3","Type":"ContainerDied","Data":"d6542973ff4f3e8cd5f8a1bf8e7d2cfbf3071903d30ec8861d4b284bd9d41665"} Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.553842 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6542973ff4f3e8cd5f8a1bf8e7d2cfbf3071903d30ec8861d4b284bd9d41665" Jan 31 06:45:03 crc kubenswrapper[4712]: I0131 06:45:03.553905 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497365-tpsd2" Jan 31 06:45:04 crc kubenswrapper[4712]: I0131 06:45:04.239029 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"] Jan 31 06:45:04 crc kubenswrapper[4712]: I0131 06:45:04.250287 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497320-wpq6s"] Jan 31 06:45:04 crc kubenswrapper[4712]: I0131 06:45:04.514826 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f57bed8-d02f-4e4d-9ca8-11f061b37df5" path="/var/lib/kubelet/pods/9f57bed8-d02f-4e4d-9ca8-11f061b37df5/volumes" Jan 31 06:45:14 crc kubenswrapper[4712]: I0131 06:45:14.511448 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:45:14 crc kubenswrapper[4712]: E0131 06:45:14.512279 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:45:22 crc kubenswrapper[4712]: I0131 06:45:22.205576 4712 scope.go:117] "RemoveContainer" containerID="ba0a0bc0080c32364c7aac525e2dd1185e9292e16ce1933f89846ce597c618c5" Jan 31 06:45:25 crc kubenswrapper[4712]: I0131 06:45:25.504435 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:45:25 crc kubenswrapper[4712]: E0131 06:45:25.505344 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:45:38 crc kubenswrapper[4712]: I0131 06:45:38.504075 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:45:38 crc kubenswrapper[4712]: E0131 06:45:38.504923 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:45:53 crc kubenswrapper[4712]: I0131 06:45:53.506355 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:45:53 crc kubenswrapper[4712]: E0131 06:45:53.507389 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:46:08 crc kubenswrapper[4712]: I0131 06:46:08.504936 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:46:08 crc kubenswrapper[4712]: E0131 06:46:08.505769 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:46:23 crc kubenswrapper[4712]: I0131 06:46:23.504359 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:46:23 crc kubenswrapper[4712]: E0131 06:46:23.505141 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:46:38 crc kubenswrapper[4712]: I0131 06:46:38.505353 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:46:38 crc kubenswrapper[4712]: E0131 06:46:38.506337 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:46:51 crc kubenswrapper[4712]: I0131 06:46:51.504219 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:46:51 crc kubenswrapper[4712]: E0131 06:46:51.505231 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:47:02 crc kubenswrapper[4712]: I0131 06:47:02.504816 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:47:02 crc kubenswrapper[4712]: E0131 06:47:02.506239 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.318924 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xpdbh"] Jan 31 06:47:14 crc kubenswrapper[4712]: E0131 06:47:14.319989 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f06982a9-ee52-49e1-bb14-fa58a96f03f3" containerName="collect-profiles" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.320010 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="f06982a9-ee52-49e1-bb14-fa58a96f03f3" containerName="collect-profiles" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.320245 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="f06982a9-ee52-49e1-bb14-fa58a96f03f3" containerName="collect-profiles" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.322103 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.335599 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xpdbh"] Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.385949 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vqqq\" (UniqueName: \"kubernetes.io/projected/c0955279-d54e-4bb1-9551-4842b02268e5-kube-api-access-6vqqq\") pod \"certified-operators-xpdbh\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.386189 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-utilities\") pod \"certified-operators-xpdbh\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.386287 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-catalog-content\") pod \"certified-operators-xpdbh\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.488313 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vqqq\" (UniqueName: \"kubernetes.io/projected/c0955279-d54e-4bb1-9551-4842b02268e5-kube-api-access-6vqqq\") pod \"certified-operators-xpdbh\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.488489 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-utilities\") pod \"certified-operators-xpdbh\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.488516 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-catalog-content\") pod \"certified-operators-xpdbh\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.489192 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-catalog-content\") pod \"certified-operators-xpdbh\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.489855 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-utilities\") pod \"certified-operators-xpdbh\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.510690 4712 scope.go:117] 
"RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.531033 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vqqq\" (UniqueName: \"kubernetes.io/projected/c0955279-d54e-4bb1-9551-4842b02268e5-kube-api-access-6vqqq\") pod \"certified-operators-xpdbh\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:14 crc kubenswrapper[4712]: I0131 06:47:14.647736 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:15 crc kubenswrapper[4712]: I0131 06:47:15.238102 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xpdbh"] Jan 31 06:47:15 crc kubenswrapper[4712]: I0131 06:47:15.754433 4712 generic.go:334] "Generic (PLEG): container finished" podID="c0955279-d54e-4bb1-9551-4842b02268e5" containerID="9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c" exitCode=0 Jan 31 06:47:15 crc kubenswrapper[4712]: I0131 06:47:15.754666 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpdbh" event={"ID":"c0955279-d54e-4bb1-9551-4842b02268e5","Type":"ContainerDied","Data":"9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c"} Jan 31 06:47:15 crc kubenswrapper[4712]: I0131 06:47:15.754990 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpdbh" event={"ID":"c0955279-d54e-4bb1-9551-4842b02268e5","Type":"ContainerStarted","Data":"955a9d93ef2d99211aa75169bea1ae66228c1829001adbfdea0bfcaa5b4a0574"} Jan 31 06:47:15 crc kubenswrapper[4712]: I0131 06:47:15.756905 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 06:47:15 crc kubenswrapper[4712]: I0131 06:47:15.760408 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"952f6981a91ea744be3bbdbee415ef3735f560f14d83a3f0c53c078852581171"} Jan 31 06:47:17 crc kubenswrapper[4712]: I0131 06:47:17.779624 4712 generic.go:334] "Generic (PLEG): container finished" podID="c0955279-d54e-4bb1-9551-4842b02268e5" containerID="5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6" exitCode=0 Jan 31 06:47:17 crc kubenswrapper[4712]: I0131 06:47:17.779703 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpdbh" event={"ID":"c0955279-d54e-4bb1-9551-4842b02268e5","Type":"ContainerDied","Data":"5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6"} Jan 31 06:47:19 crc kubenswrapper[4712]: I0131 06:47:19.802350 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpdbh" event={"ID":"c0955279-d54e-4bb1-9551-4842b02268e5","Type":"ContainerStarted","Data":"54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde"} Jan 31 06:47:19 crc kubenswrapper[4712]: I0131 06:47:19.834849 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xpdbh" podStartSLOduration=2.951017302 podStartE2EDuration="5.834825386s" podCreationTimestamp="2026-01-31 06:47:14 +0000 UTC" firstStartedPulling="2026-01-31 06:47:15.756624547 +0000 UTC 
m=+4101.850506388" lastFinishedPulling="2026-01-31 06:47:18.640432641 +0000 UTC m=+4104.734314472" observedRunningTime="2026-01-31 06:47:19.829527516 +0000 UTC m=+4105.923409357" watchObservedRunningTime="2026-01-31 06:47:19.834825386 +0000 UTC m=+4105.928707227" Jan 31 06:47:24 crc kubenswrapper[4712]: I0131 06:47:24.648499 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:24 crc kubenswrapper[4712]: I0131 06:47:24.650355 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:24 crc kubenswrapper[4712]: I0131 06:47:24.719518 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:24 crc kubenswrapper[4712]: I0131 06:47:24.901327 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:24 crc kubenswrapper[4712]: I0131 06:47:24.963369 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xpdbh"] Jan 31 06:47:26 crc kubenswrapper[4712]: I0131 06:47:26.867519 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xpdbh" podUID="c0955279-d54e-4bb1-9551-4842b02268e5" containerName="registry-server" containerID="cri-o://54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde" gracePeriod=2 Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.831467 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.880314 4712 generic.go:334] "Generic (PLEG): container finished" podID="c0955279-d54e-4bb1-9551-4842b02268e5" containerID="54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde" exitCode=0 Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.880362 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpdbh" event={"ID":"c0955279-d54e-4bb1-9551-4842b02268e5","Type":"ContainerDied","Data":"54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde"} Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.880381 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xpdbh" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.880406 4712 scope.go:117] "RemoveContainer" containerID="54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.880392 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpdbh" event={"ID":"c0955279-d54e-4bb1-9551-4842b02268e5","Type":"ContainerDied","Data":"955a9d93ef2d99211aa75169bea1ae66228c1829001adbfdea0bfcaa5b4a0574"} Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.913981 4712 scope.go:117] "RemoveContainer" containerID="5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.938333 4712 scope.go:117] "RemoveContainer" containerID="9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.978041 4712 scope.go:117] "RemoveContainer" containerID="54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde" Jan 31 06:47:27 crc kubenswrapper[4712]: E0131 06:47:27.978804 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde\": container with ID starting with 54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde not found: ID does not exist" containerID="54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.978850 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde"} err="failed to get container status \"54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde\": rpc error: code = NotFound desc = could not find container \"54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde\": container with ID starting with 54fdd1e2cb72b855591068bd7c5f37922b2321ca012fdb621e1669c5f3a3ebde not found: ID does not exist" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.978878 4712 scope.go:117] "RemoveContainer" containerID="5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6" Jan 31 06:47:27 crc kubenswrapper[4712]: E0131 06:47:27.979497 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6\": container with ID starting with 5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6 not found: ID does not exist" containerID="5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.979519 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6"} err="failed to get container status \"5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6\": rpc error: code = NotFound desc = could not find container \"5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6\": container with ID starting with 5db04e5edfd8bfdb55030bf1dcfa3d78a21a80998c88abdaf498266939cac2d6 not found: ID does not exist" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.979535 4712 scope.go:117] "RemoveContainer" 
containerID="9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c" Jan 31 06:47:27 crc kubenswrapper[4712]: E0131 06:47:27.980267 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c\": container with ID starting with 9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c not found: ID does not exist" containerID="9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.980302 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c"} err="failed to get container status \"9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c\": rpc error: code = NotFound desc = could not find container \"9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c\": container with ID starting with 9909b4c99f3137d0331622c5b4ac5c77a5d1ca38f8a2dc3bc882f91bbea11c9c not found: ID does not exist" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.989498 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-utilities\") pod \"c0955279-d54e-4bb1-9551-4842b02268e5\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.990558 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-utilities" (OuterVolumeSpecName: "utilities") pod "c0955279-d54e-4bb1-9551-4842b02268e5" (UID: "c0955279-d54e-4bb1-9551-4842b02268e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.990810 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-catalog-content\") pod \"c0955279-d54e-4bb1-9551-4842b02268e5\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.990977 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vqqq\" (UniqueName: \"kubernetes.io/projected/c0955279-d54e-4bb1-9551-4842b02268e5-kube-api-access-6vqqq\") pod \"c0955279-d54e-4bb1-9551-4842b02268e5\" (UID: \"c0955279-d54e-4bb1-9551-4842b02268e5\") " Jan 31 06:47:27 crc kubenswrapper[4712]: I0131 06:47:27.991742 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:47:28 crc kubenswrapper[4712]: I0131 06:47:27.999717 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0955279-d54e-4bb1-9551-4842b02268e5-kube-api-access-6vqqq" (OuterVolumeSpecName: "kube-api-access-6vqqq") pod "c0955279-d54e-4bb1-9551-4842b02268e5" (UID: "c0955279-d54e-4bb1-9551-4842b02268e5"). InnerVolumeSpecName "kube-api-access-6vqqq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:47:28 crc kubenswrapper[4712]: I0131 06:47:28.041602 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0955279-d54e-4bb1-9551-4842b02268e5" (UID: "c0955279-d54e-4bb1-9551-4842b02268e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:47:28 crc kubenswrapper[4712]: I0131 06:47:28.094402 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0955279-d54e-4bb1-9551-4842b02268e5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:47:28 crc kubenswrapper[4712]: I0131 06:47:28.094458 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vqqq\" (UniqueName: \"kubernetes.io/projected/c0955279-d54e-4bb1-9551-4842b02268e5-kube-api-access-6vqqq\") on node \"crc\" DevicePath \"\"" Jan 31 06:47:28 crc kubenswrapper[4712]: I0131 06:47:28.216958 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xpdbh"] Jan 31 06:47:28 crc kubenswrapper[4712]: I0131 06:47:28.235134 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xpdbh"] Jan 31 06:47:28 crc kubenswrapper[4712]: I0131 06:47:28.516827 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0955279-d54e-4bb1-9551-4842b02268e5" path="/var/lib/kubelet/pods/c0955279-d54e-4bb1-9551-4842b02268e5/volumes" Jan 31 06:49:42 crc kubenswrapper[4712]: I0131 06:49:42.497730 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:49:42 crc kubenswrapper[4712]: I0131 06:49:42.498314 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:50:12 crc kubenswrapper[4712]: I0131 06:50:12.496983 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:50:12 crc kubenswrapper[4712]: I0131 06:50:12.497586 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:50:23 crc kubenswrapper[4712]: I0131 06:50:23.694477 4712 generic.go:334] "Generic (PLEG): container finished" podID="0f27224a-407b-4803-afd6-9c1caa3fbfdf" containerID="48f486d920885812aafd31457a07b646cc2d1dc654e252caa00a0fcc45a369d5" exitCode=0 Jan 31 06:50:23 crc kubenswrapper[4712]: I0131 06:50:23.694568 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" 
event={"ID":"0f27224a-407b-4803-afd6-9c1caa3fbfdf","Type":"ContainerDied","Data":"48f486d920885812aafd31457a07b646cc2d1dc654e252caa00a0fcc45a369d5"} Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.046660 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.144241 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config-secret\") pod \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.144443 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.144532 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config\") pod \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.144559 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ca-certs\") pod \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.145051 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-workdir\") pod \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.145103 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-temporary\") pod \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.145159 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-config-data\") pod \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.145249 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ssh-key\") pod \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\" (UID: \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.145348 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn96k\" (UniqueName: \"kubernetes.io/projected/0f27224a-407b-4803-afd6-9c1caa3fbfdf-kube-api-access-mn96k\") pod \"0f27224a-407b-4803-afd6-9c1caa3fbfdf\" (UID: 
\"0f27224a-407b-4803-afd6-9c1caa3fbfdf\") " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.145676 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "0f27224a-407b-4803-afd6-9c1caa3fbfdf" (UID: "0f27224a-407b-4803-afd6-9c1caa3fbfdf"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.146249 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-config-data" (OuterVolumeSpecName: "config-data") pod "0f27224a-407b-4803-afd6-9c1caa3fbfdf" (UID: "0f27224a-407b-4803-afd6-9c1caa3fbfdf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.146769 4712 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.146787 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.149915 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "0f27224a-407b-4803-afd6-9c1caa3fbfdf" (UID: "0f27224a-407b-4803-afd6-9c1caa3fbfdf"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.152421 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "test-operator-logs") pod "0f27224a-407b-4803-afd6-9c1caa3fbfdf" (UID: "0f27224a-407b-4803-afd6-9c1caa3fbfdf"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.152690 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f27224a-407b-4803-afd6-9c1caa3fbfdf-kube-api-access-mn96k" (OuterVolumeSpecName: "kube-api-access-mn96k") pod "0f27224a-407b-4803-afd6-9c1caa3fbfdf" (UID: "0f27224a-407b-4803-afd6-9c1caa3fbfdf"). InnerVolumeSpecName "kube-api-access-mn96k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.175334 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0f27224a-407b-4803-afd6-9c1caa3fbfdf" (UID: "0f27224a-407b-4803-afd6-9c1caa3fbfdf"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.178237 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "0f27224a-407b-4803-afd6-9c1caa3fbfdf" (UID: "0f27224a-407b-4803-afd6-9c1caa3fbfdf"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.181381 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "0f27224a-407b-4803-afd6-9c1caa3fbfdf" (UID: "0f27224a-407b-4803-afd6-9c1caa3fbfdf"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.225072 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "0f27224a-407b-4803-afd6-9c1caa3fbfdf" (UID: "0f27224a-407b-4803-afd6-9c1caa3fbfdf"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.229348 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6tff6"] Jan 31 06:50:25 crc kubenswrapper[4712]: E0131 06:50:25.229985 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0955279-d54e-4bb1-9551-4842b02268e5" containerName="registry-server" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.230010 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0955279-d54e-4bb1-9551-4842b02268e5" containerName="registry-server" Jan 31 06:50:25 crc kubenswrapper[4712]: E0131 06:50:25.230036 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0955279-d54e-4bb1-9551-4842b02268e5" containerName="extract-content" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.230048 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0955279-d54e-4bb1-9551-4842b02268e5" containerName="extract-content" Jan 31 06:50:25 crc kubenswrapper[4712]: E0131 06:50:25.230074 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0955279-d54e-4bb1-9551-4842b02268e5" containerName="extract-utilities" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.230112 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0955279-d54e-4bb1-9551-4842b02268e5" containerName="extract-utilities" Jan 31 06:50:25 crc kubenswrapper[4712]: E0131 06:50:25.230136 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f27224a-407b-4803-afd6-9c1caa3fbfdf" containerName="tempest-tests-tempest-tests-runner" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.230147 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f27224a-407b-4803-afd6-9c1caa3fbfdf" containerName="tempest-tests-tempest-tests-runner" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.230426 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f27224a-407b-4803-afd6-9c1caa3fbfdf" containerName="tempest-tests-tempest-tests-runner" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.230459 4712 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c0955279-d54e-4bb1-9551-4842b02268e5" containerName="registry-server" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.232294 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.248261 4712 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ssh-key\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.248294 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn96k\" (UniqueName: \"kubernetes.io/projected/0f27224a-407b-4803-afd6-9c1caa3fbfdf-kube-api-access-mn96k\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.248305 4712 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.248347 4712 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.248361 4712 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0f27224a-407b-4803-afd6-9c1caa3fbfdf-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.248371 4712 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/0f27224a-407b-4803-afd6-9c1caa3fbfdf-ca-certs\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.248383 4712 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/0f27224a-407b-4803-afd6-9c1caa3fbfdf-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.259258 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6tff6"] Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.286311 4712 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.350457 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-utilities\") pod \"community-operators-6tff6\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.350848 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-catalog-content\") pod \"community-operators-6tff6\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.351048 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-m6l9h\" (UniqueName: \"kubernetes.io/projected/5a670f90-c12c-4386-8705-a6629ca644bc-kube-api-access-m6l9h\") pod \"community-operators-6tff6\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.351253 4712 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.454916 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6l9h\" (UniqueName: \"kubernetes.io/projected/5a670f90-c12c-4386-8705-a6629ca644bc-kube-api-access-m6l9h\") pod \"community-operators-6tff6\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.455017 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-utilities\") pod \"community-operators-6tff6\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.455100 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-catalog-content\") pod \"community-operators-6tff6\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.455701 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-catalog-content\") pod \"community-operators-6tff6\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.455890 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-utilities\") pod \"community-operators-6tff6\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.473418 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6l9h\" (UniqueName: \"kubernetes.io/projected/5a670f90-c12c-4386-8705-a6629ca644bc-kube-api-access-m6l9h\") pod \"community-operators-6tff6\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.620674 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.718991 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"0f27224a-407b-4803-afd6-9c1caa3fbfdf","Type":"ContainerDied","Data":"356e0d8705b3eb370d799af9325b97695445745d60c408e35a7a7dab5903d0e0"} Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.719073 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="356e0d8705b3eb370d799af9325b97695445745d60c408e35a7a7dab5903d0e0" Jan 31 06:50:25 crc kubenswrapper[4712]: I0131 06:50:25.719210 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 31 06:50:26 crc kubenswrapper[4712]: I0131 06:50:26.212223 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6tff6"] Jan 31 06:50:26 crc kubenswrapper[4712]: I0131 06:50:26.729004 4712 generic.go:334] "Generic (PLEG): container finished" podID="5a670f90-c12c-4386-8705-a6629ca644bc" containerID="155eb2648b537d5dce7cfe4e58e555b11cc95ff1309baa0c8c70d91acd3b1072" exitCode=0 Jan 31 06:50:26 crc kubenswrapper[4712]: I0131 06:50:26.729114 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6tff6" event={"ID":"5a670f90-c12c-4386-8705-a6629ca644bc","Type":"ContainerDied","Data":"155eb2648b537d5dce7cfe4e58e555b11cc95ff1309baa0c8c70d91acd3b1072"} Jan 31 06:50:26 crc kubenswrapper[4712]: I0131 06:50:26.729317 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6tff6" event={"ID":"5a670f90-c12c-4386-8705-a6629ca644bc","Type":"ContainerStarted","Data":"80e7f21092a764f81d7a6651a703219f191d19e38a7879ecf86cf563b984c6f1"} Jan 31 06:50:28 crc kubenswrapper[4712]: I0131 06:50:28.750881 4712 generic.go:334] "Generic (PLEG): container finished" podID="5a670f90-c12c-4386-8705-a6629ca644bc" containerID="1cefd2701a95f11ec9fcf968ee83762208f5b6ee40d7fbce03a44ff819e3f942" exitCode=0 Jan 31 06:50:28 crc kubenswrapper[4712]: I0131 06:50:28.750940 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6tff6" event={"ID":"5a670f90-c12c-4386-8705-a6629ca644bc","Type":"ContainerDied","Data":"1cefd2701a95f11ec9fcf968ee83762208f5b6ee40d7fbce03a44ff819e3f942"} Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.073681 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.075275 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.077633 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-fq6c7" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.084858 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.139636 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97dqz\" (UniqueName: \"kubernetes.io/projected/acea85f9-50c3-4f55-8928-dbcf70e29709-kube-api-access-97dqz\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"acea85f9-50c3-4f55-8928-dbcf70e29709\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.140111 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"acea85f9-50c3-4f55-8928-dbcf70e29709\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.241879 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"acea85f9-50c3-4f55-8928-dbcf70e29709\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.241965 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97dqz\" (UniqueName: \"kubernetes.io/projected/acea85f9-50c3-4f55-8928-dbcf70e29709-kube-api-access-97dqz\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"acea85f9-50c3-4f55-8928-dbcf70e29709\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.242460 4712 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"acea85f9-50c3-4f55-8928-dbcf70e29709\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.269239 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97dqz\" (UniqueName: \"kubernetes.io/projected/acea85f9-50c3-4f55-8928-dbcf70e29709-kube-api-access-97dqz\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"acea85f9-50c3-4f55-8928-dbcf70e29709\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.272090 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"acea85f9-50c3-4f55-8928-dbcf70e29709\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 31 06:50:29 crc 
kubenswrapper[4712]: I0131 06:50:29.432343 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.764570 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6tff6" event={"ID":"5a670f90-c12c-4386-8705-a6629ca644bc","Type":"ContainerStarted","Data":"e08c3ecbf42a9e14d2d8d47428d5197beb2ef035882c5045bf83c14a522eca7d"} Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.788972 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6tff6" podStartSLOduration=2.284975586 podStartE2EDuration="4.788941833s" podCreationTimestamp="2026-01-31 06:50:25 +0000 UTC" firstStartedPulling="2026-01-31 06:50:26.730736046 +0000 UTC m=+4292.824617887" lastFinishedPulling="2026-01-31 06:50:29.234702293 +0000 UTC m=+4295.328584134" observedRunningTime="2026-01-31 06:50:29.781388117 +0000 UTC m=+4295.875269958" watchObservedRunningTime="2026-01-31 06:50:29.788941833 +0000 UTC m=+4295.882823674" Jan 31 06:50:29 crc kubenswrapper[4712]: I0131 06:50:29.915273 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Jan 31 06:50:30 crc kubenswrapper[4712]: I0131 06:50:30.777010 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"acea85f9-50c3-4f55-8928-dbcf70e29709","Type":"ContainerStarted","Data":"727f0da295526edffeac33833c92e9403a24882a18dd5ba628cf84a84348239f"} Jan 31 06:50:31 crc kubenswrapper[4712]: I0131 06:50:31.792219 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"acea85f9-50c3-4f55-8928-dbcf70e29709","Type":"ContainerStarted","Data":"b3387d88d5c27d6e4b6dd8a46a1d56c2224a157efc55c632d1d05f22539da601"} Jan 31 06:50:31 crc kubenswrapper[4712]: I0131 06:50:31.812530 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.015919161 podStartE2EDuration="2.812513469s" podCreationTimestamp="2026-01-31 06:50:29 +0000 UTC" firstStartedPulling="2026-01-31 06:50:30.002144633 +0000 UTC m=+4296.096026474" lastFinishedPulling="2026-01-31 06:50:30.798738941 +0000 UTC m=+4296.892620782" observedRunningTime="2026-01-31 06:50:31.807928586 +0000 UTC m=+4297.901810447" watchObservedRunningTime="2026-01-31 06:50:31.812513469 +0000 UTC m=+4297.906395310" Jan 31 06:50:35 crc kubenswrapper[4712]: I0131 06:50:35.621960 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:35 crc kubenswrapper[4712]: I0131 06:50:35.622267 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:35 crc kubenswrapper[4712]: I0131 06:50:35.668849 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:35 crc kubenswrapper[4712]: I0131 06:50:35.867243 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.597152 4712 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-wcc66"] Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.600047 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.609234 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcc66"] Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.687289 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-catalog-content\") pod \"redhat-marketplace-wcc66\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.687354 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjnpw\" (UniqueName: \"kubernetes.io/projected/ed0f98df-1a44-4081-8742-a4253f7ab9d5-kube-api-access-zjnpw\") pod \"redhat-marketplace-wcc66\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.687377 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-utilities\") pod \"redhat-marketplace-wcc66\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.788986 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-catalog-content\") pod \"redhat-marketplace-wcc66\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.789041 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjnpw\" (UniqueName: \"kubernetes.io/projected/ed0f98df-1a44-4081-8742-a4253f7ab9d5-kube-api-access-zjnpw\") pod \"redhat-marketplace-wcc66\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.789066 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-utilities\") pod \"redhat-marketplace-wcc66\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.789719 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-catalog-content\") pod \"redhat-marketplace-wcc66\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.789750 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-utilities\") pod \"redhat-marketplace-wcc66\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") 
" pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.802102 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-djfqw"] Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.804947 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.819557 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjnpw\" (UniqueName: \"kubernetes.io/projected/ed0f98df-1a44-4081-8742-a4253f7ab9d5-kube-api-access-zjnpw\") pod \"redhat-marketplace-wcc66\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.833213 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-djfqw"] Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.889833 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg2q5\" (UniqueName: \"kubernetes.io/projected/ed4ee683-b0a3-474a-a46a-20e9e04e92db-kube-api-access-rg2q5\") pod \"redhat-operators-djfqw\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.889890 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-utilities\") pod \"redhat-operators-djfqw\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.889914 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-catalog-content\") pod \"redhat-operators-djfqw\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.920212 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.991629 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg2q5\" (UniqueName: \"kubernetes.io/projected/ed4ee683-b0a3-474a-a46a-20e9e04e92db-kube-api-access-rg2q5\") pod \"redhat-operators-djfqw\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.991973 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-utilities\") pod \"redhat-operators-djfqw\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.992069 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-catalog-content\") pod \"redhat-operators-djfqw\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.992633 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-catalog-content\") pod \"redhat-operators-djfqw\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:36 crc kubenswrapper[4712]: I0131 06:50:36.992776 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-utilities\") pod \"redhat-operators-djfqw\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:37 crc kubenswrapper[4712]: I0131 06:50:37.025194 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg2q5\" (UniqueName: \"kubernetes.io/projected/ed4ee683-b0a3-474a-a46a-20e9e04e92db-kube-api-access-rg2q5\") pod \"redhat-operators-djfqw\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:37 crc kubenswrapper[4712]: I0131 06:50:37.168572 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:37 crc kubenswrapper[4712]: I0131 06:50:37.428053 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcc66"] Jan 31 06:50:37 crc kubenswrapper[4712]: W0131 06:50:37.433062 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded0f98df_1a44_4081_8742_a4253f7ab9d5.slice/crio-12c3ad4e77d69b2c40641d077e899b6d7fe79c40eb8b99ae305c91f5aa5717d3 WatchSource:0}: Error finding container 12c3ad4e77d69b2c40641d077e899b6d7fe79c40eb8b99ae305c91f5aa5717d3: Status 404 returned error can't find the container with id 12c3ad4e77d69b2c40641d077e899b6d7fe79c40eb8b99ae305c91f5aa5717d3 Jan 31 06:50:37 crc kubenswrapper[4712]: I0131 06:50:37.724460 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-djfqw"] Jan 31 06:50:37 crc kubenswrapper[4712]: I0131 06:50:37.850749 4712 generic.go:334] "Generic (PLEG): container finished" podID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerID="848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9" exitCode=0 Jan 31 06:50:37 crc kubenswrapper[4712]: I0131 06:50:37.850828 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcc66" event={"ID":"ed0f98df-1a44-4081-8742-a4253f7ab9d5","Type":"ContainerDied","Data":"848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9"} Jan 31 06:50:37 crc kubenswrapper[4712]: I0131 06:50:37.850860 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcc66" event={"ID":"ed0f98df-1a44-4081-8742-a4253f7ab9d5","Type":"ContainerStarted","Data":"12c3ad4e77d69b2c40641d077e899b6d7fe79c40eb8b99ae305c91f5aa5717d3"} Jan 31 06:50:37 crc kubenswrapper[4712]: I0131 06:50:37.853607 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djfqw" event={"ID":"ed4ee683-b0a3-474a-a46a-20e9e04e92db","Type":"ContainerStarted","Data":"a963bf3a5fdc6668674c4068fc8db80a697ce7ae1e4ed74e8806510b1bb59cdc"} Jan 31 06:50:38 crc kubenswrapper[4712]: I0131 06:50:38.872060 4712 generic.go:334] "Generic (PLEG): container finished" podID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerID="6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe" exitCode=0 Jan 31 06:50:38 crc kubenswrapper[4712]: I0131 06:50:38.872184 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djfqw" event={"ID":"ed4ee683-b0a3-474a-a46a-20e9e04e92db","Type":"ContainerDied","Data":"6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe"} Jan 31 06:50:39 crc kubenswrapper[4712]: I0131 06:50:39.186912 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6tff6"] Jan 31 06:50:39 crc kubenswrapper[4712]: I0131 06:50:39.187266 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6tff6" podUID="5a670f90-c12c-4386-8705-a6629ca644bc" containerName="registry-server" containerID="cri-o://e08c3ecbf42a9e14d2d8d47428d5197beb2ef035882c5045bf83c14a522eca7d" gracePeriod=2 Jan 31 06:50:39 crc kubenswrapper[4712]: I0131 06:50:39.889994 4712 generic.go:334] "Generic (PLEG): container finished" podID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerID="6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe" exitCode=0 Jan 
31 06:50:39 crc kubenswrapper[4712]: I0131 06:50:39.890303 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcc66" event={"ID":"ed0f98df-1a44-4081-8742-a4253f7ab9d5","Type":"ContainerDied","Data":"6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe"} Jan 31 06:50:39 crc kubenswrapper[4712]: I0131 06:50:39.907872 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djfqw" event={"ID":"ed4ee683-b0a3-474a-a46a-20e9e04e92db","Type":"ContainerStarted","Data":"ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89"} Jan 31 06:50:39 crc kubenswrapper[4712]: I0131 06:50:39.922873 4712 generic.go:334] "Generic (PLEG): container finished" podID="5a670f90-c12c-4386-8705-a6629ca644bc" containerID="e08c3ecbf42a9e14d2d8d47428d5197beb2ef035882c5045bf83c14a522eca7d" exitCode=0 Jan 31 06:50:39 crc kubenswrapper[4712]: I0131 06:50:39.922934 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6tff6" event={"ID":"5a670f90-c12c-4386-8705-a6629ca644bc","Type":"ContainerDied","Data":"e08c3ecbf42a9e14d2d8d47428d5197beb2ef035882c5045bf83c14a522eca7d"} Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.179932 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.266575 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6l9h\" (UniqueName: \"kubernetes.io/projected/5a670f90-c12c-4386-8705-a6629ca644bc-kube-api-access-m6l9h\") pod \"5a670f90-c12c-4386-8705-a6629ca644bc\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.266647 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-utilities\") pod \"5a670f90-c12c-4386-8705-a6629ca644bc\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.266745 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-catalog-content\") pod \"5a670f90-c12c-4386-8705-a6629ca644bc\" (UID: \"5a670f90-c12c-4386-8705-a6629ca644bc\") " Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.273319 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a670f90-c12c-4386-8705-a6629ca644bc-kube-api-access-m6l9h" (OuterVolumeSpecName: "kube-api-access-m6l9h") pod "5a670f90-c12c-4386-8705-a6629ca644bc" (UID: "5a670f90-c12c-4386-8705-a6629ca644bc"). InnerVolumeSpecName "kube-api-access-m6l9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.275676 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-utilities" (OuterVolumeSpecName: "utilities") pod "5a670f90-c12c-4386-8705-a6629ca644bc" (UID: "5a670f90-c12c-4386-8705-a6629ca644bc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.330548 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5a670f90-c12c-4386-8705-a6629ca644bc" (UID: "5a670f90-c12c-4386-8705-a6629ca644bc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.369139 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6l9h\" (UniqueName: \"kubernetes.io/projected/5a670f90-c12c-4386-8705-a6629ca644bc-kube-api-access-m6l9h\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.369189 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.369360 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a670f90-c12c-4386-8705-a6629ca644bc-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.936884 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcc66" event={"ID":"ed0f98df-1a44-4081-8742-a4253f7ab9d5","Type":"ContainerStarted","Data":"30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6"} Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.940537 4712 generic.go:334] "Generic (PLEG): container finished" podID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerID="ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89" exitCode=0 Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.940616 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djfqw" event={"ID":"ed4ee683-b0a3-474a-a46a-20e9e04e92db","Type":"ContainerDied","Data":"ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89"} Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.947386 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6tff6" event={"ID":"5a670f90-c12c-4386-8705-a6629ca644bc","Type":"ContainerDied","Data":"80e7f21092a764f81d7a6651a703219f191d19e38a7879ecf86cf563b984c6f1"} Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.947477 4712 scope.go:117] "RemoveContainer" containerID="e08c3ecbf42a9e14d2d8d47428d5197beb2ef035882c5045bf83c14a522eca7d" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.947630 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6tff6" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.967388 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wcc66" podStartSLOduration=2.512398072 podStartE2EDuration="4.967359033s" podCreationTimestamp="2026-01-31 06:50:36 +0000 UTC" firstStartedPulling="2026-01-31 06:50:37.853047744 +0000 UTC m=+4303.946929585" lastFinishedPulling="2026-01-31 06:50:40.308008685 +0000 UTC m=+4306.401890546" observedRunningTime="2026-01-31 06:50:40.956284031 +0000 UTC m=+4307.050165892" watchObservedRunningTime="2026-01-31 06:50:40.967359033 +0000 UTC m=+4307.061240874" Jan 31 06:50:40 crc kubenswrapper[4712]: I0131 06:50:40.981286 4712 scope.go:117] "RemoveContainer" containerID="1cefd2701a95f11ec9fcf968ee83762208f5b6ee40d7fbce03a44ff819e3f942" Jan 31 06:50:41 crc kubenswrapper[4712]: I0131 06:50:41.003573 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6tff6"] Jan 31 06:50:41 crc kubenswrapper[4712]: I0131 06:50:41.012758 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6tff6"] Jan 31 06:50:41 crc kubenswrapper[4712]: I0131 06:50:41.296599 4712 scope.go:117] "RemoveContainer" containerID="155eb2648b537d5dce7cfe4e58e555b11cc95ff1309baa0c8c70d91acd3b1072" Jan 31 06:50:41 crc kubenswrapper[4712]: I0131 06:50:41.996604 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djfqw" event={"ID":"ed4ee683-b0a3-474a-a46a-20e9e04e92db","Type":"ContainerStarted","Data":"bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd"} Jan 31 06:50:42 crc kubenswrapper[4712]: I0131 06:50:42.026749 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-djfqw" podStartSLOduration=3.524870017 podStartE2EDuration="6.026723303s" podCreationTimestamp="2026-01-31 06:50:36 +0000 UTC" firstStartedPulling="2026-01-31 06:50:38.875200647 +0000 UTC m=+4304.969082488" lastFinishedPulling="2026-01-31 06:50:41.377053933 +0000 UTC m=+4307.470935774" observedRunningTime="2026-01-31 06:50:42.023416072 +0000 UTC m=+4308.117297923" watchObservedRunningTime="2026-01-31 06:50:42.026723303 +0000 UTC m=+4308.120605134" Jan 31 06:50:42 crc kubenswrapper[4712]: I0131 06:50:42.497296 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:50:42 crc kubenswrapper[4712]: I0131 06:50:42.497375 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:50:42 crc kubenswrapper[4712]: I0131 06:50:42.497428 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 06:50:42 crc kubenswrapper[4712]: I0131 06:50:42.498313 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"952f6981a91ea744be3bbdbee415ef3735f560f14d83a3f0c53c078852581171"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 06:50:42 crc kubenswrapper[4712]: I0131 06:50:42.498380 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://952f6981a91ea744be3bbdbee415ef3735f560f14d83a3f0c53c078852581171" gracePeriod=600 Jan 31 06:50:42 crc kubenswrapper[4712]: I0131 06:50:42.520345 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a670f90-c12c-4386-8705-a6629ca644bc" path="/var/lib/kubelet/pods/5a670f90-c12c-4386-8705-a6629ca644bc/volumes" Jan 31 06:50:43 crc kubenswrapper[4712]: I0131 06:50:43.013049 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="952f6981a91ea744be3bbdbee415ef3735f560f14d83a3f0c53c078852581171" exitCode=0 Jan 31 06:50:43 crc kubenswrapper[4712]: I0131 06:50:43.013241 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"952f6981a91ea744be3bbdbee415ef3735f560f14d83a3f0c53c078852581171"} Jan 31 06:50:43 crc kubenswrapper[4712]: I0131 06:50:43.013683 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2"} Jan 31 06:50:43 crc kubenswrapper[4712]: I0131 06:50:43.013709 4712 scope.go:117] "RemoveContainer" containerID="d029b638d121d5456eb58198b2f41b666236c5800fe5b33898acf54a373fdb01" Jan 31 06:50:46 crc kubenswrapper[4712]: I0131 06:50:46.920576 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:46 crc kubenswrapper[4712]: I0131 06:50:46.922110 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:46 crc kubenswrapper[4712]: I0131 06:50:46.974478 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:47 crc kubenswrapper[4712]: I0131 06:50:47.098779 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:47 crc kubenswrapper[4712]: I0131 06:50:47.170462 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:47 crc kubenswrapper[4712]: I0131 06:50:47.170515 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:47 crc kubenswrapper[4712]: I0131 06:50:47.586765 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcc66"] Jan 31 06:50:48 crc kubenswrapper[4712]: I0131 06:50:48.217437 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-djfqw" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" 
containerName="registry-server" probeResult="failure" output=< Jan 31 06:50:48 crc kubenswrapper[4712]: timeout: failed to connect service ":50051" within 1s Jan 31 06:50:48 crc kubenswrapper[4712]: > Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.068725 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wcc66" podUID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerName="registry-server" containerID="cri-o://30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6" gracePeriod=2 Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.519220 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.661101 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-catalog-content\") pod \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.661235 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-utilities\") pod \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.661256 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjnpw\" (UniqueName: \"kubernetes.io/projected/ed0f98df-1a44-4081-8742-a4253f7ab9d5-kube-api-access-zjnpw\") pod \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\" (UID: \"ed0f98df-1a44-4081-8742-a4253f7ab9d5\") " Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.662452 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-utilities" (OuterVolumeSpecName: "utilities") pod "ed0f98df-1a44-4081-8742-a4253f7ab9d5" (UID: "ed0f98df-1a44-4081-8742-a4253f7ab9d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.667495 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed0f98df-1a44-4081-8742-a4253f7ab9d5-kube-api-access-zjnpw" (OuterVolumeSpecName: "kube-api-access-zjnpw") pod "ed0f98df-1a44-4081-8742-a4253f7ab9d5" (UID: "ed0f98df-1a44-4081-8742-a4253f7ab9d5"). InnerVolumeSpecName "kube-api-access-zjnpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.692972 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed0f98df-1a44-4081-8742-a4253f7ab9d5" (UID: "ed0f98df-1a44-4081-8742-a4253f7ab9d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.763386 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.763421 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0f98df-1a44-4081-8742-a4253f7ab9d5-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:49 crc kubenswrapper[4712]: I0131 06:50:49.763434 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjnpw\" (UniqueName: \"kubernetes.io/projected/ed0f98df-1a44-4081-8742-a4253f7ab9d5-kube-api-access-zjnpw\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.078762 4712 generic.go:334] "Generic (PLEG): container finished" podID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerID="30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6" exitCode=0 Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.078810 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcc66" event={"ID":"ed0f98df-1a44-4081-8742-a4253f7ab9d5","Type":"ContainerDied","Data":"30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6"} Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.078862 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wcc66" event={"ID":"ed0f98df-1a44-4081-8742-a4253f7ab9d5","Type":"ContainerDied","Data":"12c3ad4e77d69b2c40641d077e899b6d7fe79c40eb8b99ae305c91f5aa5717d3"} Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.078875 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wcc66" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.078889 4712 scope.go:117] "RemoveContainer" containerID="30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.100302 4712 scope.go:117] "RemoveContainer" containerID="6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.120639 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcc66"] Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.133728 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wcc66"] Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.392108 4712 scope.go:117] "RemoveContainer" containerID="848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.455402 4712 scope.go:117] "RemoveContainer" containerID="30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6" Jan 31 06:50:50 crc kubenswrapper[4712]: E0131 06:50:50.456392 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6\": container with ID starting with 30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6 not found: ID does not exist" containerID="30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.456456 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6"} err="failed to get container status \"30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6\": rpc error: code = NotFound desc = could not find container \"30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6\": container with ID starting with 30926c03fc3cc3560b4c3ad3801832715be691129b10c7960fc8bc4e913b78a6 not found: ID does not exist" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.456495 4712 scope.go:117] "RemoveContainer" containerID="6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe" Jan 31 06:50:50 crc kubenswrapper[4712]: E0131 06:50:50.457655 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe\": container with ID starting with 6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe not found: ID does not exist" containerID="6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.457687 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe"} err="failed to get container status \"6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe\": rpc error: code = NotFound desc = could not find container \"6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe\": container with ID starting with 6706b64acb2132828ed6a2bf1afc4ffcaadb0b09fd74b90ec6c7a5fb984f8efe not found: ID does not exist" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.457716 4712 scope.go:117] "RemoveContainer" 
containerID="848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9" Jan 31 06:50:50 crc kubenswrapper[4712]: E0131 06:50:50.458025 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9\": container with ID starting with 848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9 not found: ID does not exist" containerID="848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.458057 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9"} err="failed to get container status \"848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9\": rpc error: code = NotFound desc = could not find container \"848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9\": container with ID starting with 848ce4c8d7fc78bf91590d35381e1ef86fc3e88821ed977aff9d51854555d8e9 not found: ID does not exist" Jan 31 06:50:50 crc kubenswrapper[4712]: I0131 06:50:50.515654 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" path="/var/lib/kubelet/pods/ed0f98df-1a44-4081-8742-a4253f7ab9d5/volumes" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.181490 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-f5lzb/must-gather-4kqrc"] Jan 31 06:50:53 crc kubenswrapper[4712]: E0131 06:50:53.182630 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a670f90-c12c-4386-8705-a6629ca644bc" containerName="registry-server" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.182649 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a670f90-c12c-4386-8705-a6629ca644bc" containerName="registry-server" Jan 31 06:50:53 crc kubenswrapper[4712]: E0131 06:50:53.182661 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerName="extract-utilities" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.182669 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerName="extract-utilities" Jan 31 06:50:53 crc kubenswrapper[4712]: E0131 06:50:53.182686 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a670f90-c12c-4386-8705-a6629ca644bc" containerName="extract-content" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.182694 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a670f90-c12c-4386-8705-a6629ca644bc" containerName="extract-content" Jan 31 06:50:53 crc kubenswrapper[4712]: E0131 06:50:53.182724 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a670f90-c12c-4386-8705-a6629ca644bc" containerName="extract-utilities" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.182731 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a670f90-c12c-4386-8705-a6629ca644bc" containerName="extract-utilities" Jan 31 06:50:53 crc kubenswrapper[4712]: E0131 06:50:53.182751 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerName="extract-content" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.182761 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerName="extract-content" Jan 31 
06:50:53 crc kubenswrapper[4712]: E0131 06:50:53.182781 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerName="registry-server" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.182788 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerName="registry-server" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.183031 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed0f98df-1a44-4081-8742-a4253f7ab9d5" containerName="registry-server" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.183070 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a670f90-c12c-4386-8705-a6629ca644bc" containerName="registry-server" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.184332 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.186591 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-f5lzb"/"openshift-service-ca.crt" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.186967 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-f5lzb"/"kube-root-ca.crt" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.187789 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-f5lzb"/"default-dockercfg-lf4s9" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.206912 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-f5lzb/must-gather-4kqrc"] Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.240318 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-must-gather-output\") pod \"must-gather-4kqrc\" (UID: \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\") " pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.240874 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5968\" (UniqueName: \"kubernetes.io/projected/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-kube-api-access-r5968\") pod \"must-gather-4kqrc\" (UID: \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\") " pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.343036 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-must-gather-output\") pod \"must-gather-4kqrc\" (UID: \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\") " pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.343241 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5968\" (UniqueName: \"kubernetes.io/projected/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-kube-api-access-r5968\") pod \"must-gather-4kqrc\" (UID: \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\") " pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.343652 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: 
\"kubernetes.io/empty-dir/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-must-gather-output\") pod \"must-gather-4kqrc\" (UID: \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\") " pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.369874 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5968\" (UniqueName: \"kubernetes.io/projected/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-kube-api-access-r5968\") pod \"must-gather-4kqrc\" (UID: \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\") " pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.506796 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:50:53 crc kubenswrapper[4712]: I0131 06:50:53.983331 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-f5lzb/must-gather-4kqrc"] Jan 31 06:50:53 crc kubenswrapper[4712]: W0131 06:50:53.988838 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16ecbcaf_1c2a_4461_adc6_69ee1316c9d7.slice/crio-f0fbbbcaea0b607b4f4b78832ae577a3e18654d70980bdbeb270c44fe4d46dfd WatchSource:0}: Error finding container f0fbbbcaea0b607b4f4b78832ae577a3e18654d70980bdbeb270c44fe4d46dfd: Status 404 returned error can't find the container with id f0fbbbcaea0b607b4f4b78832ae577a3e18654d70980bdbeb270c44fe4d46dfd Jan 31 06:50:54 crc kubenswrapper[4712]: I0131 06:50:54.115028 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" event={"ID":"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7","Type":"ContainerStarted","Data":"f0fbbbcaea0b607b4f4b78832ae577a3e18654d70980bdbeb270c44fe4d46dfd"} Jan 31 06:50:57 crc kubenswrapper[4712]: I0131 06:50:57.216231 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:57 crc kubenswrapper[4712]: I0131 06:50:57.273986 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:57 crc kubenswrapper[4712]: I0131 06:50:57.457195 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-djfqw"] Jan 31 06:50:58 crc kubenswrapper[4712]: I0131 06:50:58.160964 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" event={"ID":"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7","Type":"ContainerStarted","Data":"821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd"} Jan 31 06:50:58 crc kubenswrapper[4712]: I0131 06:50:58.161389 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" event={"ID":"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7","Type":"ContainerStarted","Data":"f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251"} Jan 31 06:50:58 crc kubenswrapper[4712]: I0131 06:50:58.180459 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" podStartSLOduration=1.499384495 podStartE2EDuration="5.180434472s" podCreationTimestamp="2026-01-31 06:50:53 +0000 UTC" firstStartedPulling="2026-01-31 06:50:53.994093222 +0000 UTC m=+4320.087975063" lastFinishedPulling="2026-01-31 06:50:57.675143199 +0000 UTC m=+4323.769025040" observedRunningTime="2026-01-31 06:50:58.176618139 +0000 UTC 
m=+4324.270499980" watchObservedRunningTime="2026-01-31 06:50:58.180434472 +0000 UTC m=+4324.274316323" Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.168154 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-djfqw" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerName="registry-server" containerID="cri-o://bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd" gracePeriod=2 Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.632614 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.692726 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg2q5\" (UniqueName: \"kubernetes.io/projected/ed4ee683-b0a3-474a-a46a-20e9e04e92db-kube-api-access-rg2q5\") pod \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.692835 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-catalog-content\") pod \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.693083 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-utilities\") pod \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\" (UID: \"ed4ee683-b0a3-474a-a46a-20e9e04e92db\") " Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.693822 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-utilities" (OuterVolumeSpecName: "utilities") pod "ed4ee683-b0a3-474a-a46a-20e9e04e92db" (UID: "ed4ee683-b0a3-474a-a46a-20e9e04e92db"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.698711 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed4ee683-b0a3-474a-a46a-20e9e04e92db-kube-api-access-rg2q5" (OuterVolumeSpecName: "kube-api-access-rg2q5") pod "ed4ee683-b0a3-474a-a46a-20e9e04e92db" (UID: "ed4ee683-b0a3-474a-a46a-20e9e04e92db"). InnerVolumeSpecName "kube-api-access-rg2q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.795342 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.795662 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg2q5\" (UniqueName: \"kubernetes.io/projected/ed4ee683-b0a3-474a-a46a-20e9e04e92db-kube-api-access-rg2q5\") on node \"crc\" DevicePath \"\"" Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.826903 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed4ee683-b0a3-474a-a46a-20e9e04e92db" (UID: "ed4ee683-b0a3-474a-a46a-20e9e04e92db"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:50:59 crc kubenswrapper[4712]: I0131 06:50:59.897959 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed4ee683-b0a3-474a-a46a-20e9e04e92db-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.179838 4712 generic.go:334] "Generic (PLEG): container finished" podID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerID="bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd" exitCode=0 Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.179917 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djfqw" event={"ID":"ed4ee683-b0a3-474a-a46a-20e9e04e92db","Type":"ContainerDied","Data":"bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd"} Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.181297 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-djfqw" event={"ID":"ed4ee683-b0a3-474a-a46a-20e9e04e92db","Type":"ContainerDied","Data":"a963bf3a5fdc6668674c4068fc8db80a697ce7ae1e4ed74e8806510b1bb59cdc"} Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.179944 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-djfqw" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.181364 4712 scope.go:117] "RemoveContainer" containerID="bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.208342 4712 scope.go:117] "RemoveContainer" containerID="ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.231319 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-djfqw"] Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.250258 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-djfqw"] Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.256354 4712 scope.go:117] "RemoveContainer" containerID="6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.280393 4712 scope.go:117] "RemoveContainer" containerID="bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd" Jan 31 06:51:00 crc kubenswrapper[4712]: E0131 06:51:00.281116 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd\": container with ID starting with bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd not found: ID does not exist" containerID="bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.281183 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd"} err="failed to get container status \"bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd\": rpc error: code = NotFound desc = could not find container \"bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd\": container with ID starting with bb8bfb5fd26034deea4a86b87329417a2478b99072d9519194d3b317363278cd not found: ID does not exist" Jan 31 06:51:00 crc 
kubenswrapper[4712]: I0131 06:51:00.281221 4712 scope.go:117] "RemoveContainer" containerID="ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89" Jan 31 06:51:00 crc kubenswrapper[4712]: E0131 06:51:00.281656 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89\": container with ID starting with ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89 not found: ID does not exist" containerID="ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.281684 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89"} err="failed to get container status \"ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89\": rpc error: code = NotFound desc = could not find container \"ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89\": container with ID starting with ce18601ff924d00f30935e113b9966a1b563b02f936cbb935ab9c4dd4a5b4c89 not found: ID does not exist" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.281701 4712 scope.go:117] "RemoveContainer" containerID="6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe" Jan 31 06:51:00 crc kubenswrapper[4712]: E0131 06:51:00.282208 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe\": container with ID starting with 6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe not found: ID does not exist" containerID="6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.282244 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe"} err="failed to get container status \"6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe\": rpc error: code = NotFound desc = could not find container \"6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe\": container with ID starting with 6d77faa10fd3b67b12cf4fb2bef4aaa18ce976674664f91ffdd6735356edfebe not found: ID does not exist" Jan 31 06:51:00 crc kubenswrapper[4712]: I0131 06:51:00.514587 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" path="/var/lib/kubelet/pods/ed4ee683-b0a3-474a-a46a-20e9e04e92db/volumes" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.566989 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-f5lzb/crc-debug-vfgrv"] Jan 31 06:51:02 crc kubenswrapper[4712]: E0131 06:51:02.568843 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerName="extract-utilities" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.568866 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerName="extract-utilities" Jan 31 06:51:02 crc kubenswrapper[4712]: E0131 06:51:02.568877 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerName="registry-server" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.568884 4712 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerName="registry-server" Jan 31 06:51:02 crc kubenswrapper[4712]: E0131 06:51:02.568898 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerName="extract-content" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.568906 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerName="extract-content" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.569152 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed4ee683-b0a3-474a-a46a-20e9e04e92db" containerName="registry-server" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.570377 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.751088 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4607702c-b208-4750-8fe0-878c5d005768-host\") pod \"crc-debug-vfgrv\" (UID: \"4607702c-b208-4750-8fe0-878c5d005768\") " pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.751156 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5l28\" (UniqueName: \"kubernetes.io/projected/4607702c-b208-4750-8fe0-878c5d005768-kube-api-access-c5l28\") pod \"crc-debug-vfgrv\" (UID: \"4607702c-b208-4750-8fe0-878c5d005768\") " pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.852723 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5l28\" (UniqueName: \"kubernetes.io/projected/4607702c-b208-4750-8fe0-878c5d005768-kube-api-access-c5l28\") pod \"crc-debug-vfgrv\" (UID: \"4607702c-b208-4750-8fe0-878c5d005768\") " pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.852918 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4607702c-b208-4750-8fe0-878c5d005768-host\") pod \"crc-debug-vfgrv\" (UID: \"4607702c-b208-4750-8fe0-878c5d005768\") " pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.853132 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4607702c-b208-4750-8fe0-878c5d005768-host\") pod \"crc-debug-vfgrv\" (UID: \"4607702c-b208-4750-8fe0-878c5d005768\") " pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.883309 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5l28\" (UniqueName: \"kubernetes.io/projected/4607702c-b208-4750-8fe0-878c5d005768-kube-api-access-c5l28\") pod \"crc-debug-vfgrv\" (UID: \"4607702c-b208-4750-8fe0-878c5d005768\") " pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:02 crc kubenswrapper[4712]: I0131 06:51:02.890975 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:02 crc kubenswrapper[4712]: W0131 06:51:02.920347 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4607702c_b208_4750_8fe0_878c5d005768.slice/crio-0a5e900ee8b8f201040b8e229e6e373992a326e62d26d567b960fd6cfacc5c58 WatchSource:0}: Error finding container 0a5e900ee8b8f201040b8e229e6e373992a326e62d26d567b960fd6cfacc5c58: Status 404 returned error can't find the container with id 0a5e900ee8b8f201040b8e229e6e373992a326e62d26d567b960fd6cfacc5c58 Jan 31 06:51:03 crc kubenswrapper[4712]: I0131 06:51:03.225385 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" event={"ID":"4607702c-b208-4750-8fe0-878c5d005768","Type":"ContainerStarted","Data":"0a5e900ee8b8f201040b8e229e6e373992a326e62d26d567b960fd6cfacc5c58"} Jan 31 06:51:16 crc kubenswrapper[4712]: I0131 06:51:16.363314 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" event={"ID":"4607702c-b208-4750-8fe0-878c5d005768","Type":"ContainerStarted","Data":"386fc86e951c776271686a15c51d95d511762f23af0aa4dd1ff2d8fd922db08f"} Jan 31 06:51:16 crc kubenswrapper[4712]: I0131 06:51:16.384825 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" podStartSLOduration=1.940583422 podStartE2EDuration="14.384803197s" podCreationTimestamp="2026-01-31 06:51:02 +0000 UTC" firstStartedPulling="2026-01-31 06:51:02.922659163 +0000 UTC m=+4329.016540994" lastFinishedPulling="2026-01-31 06:51:15.366878928 +0000 UTC m=+4341.460760769" observedRunningTime="2026-01-31 06:51:16.380781128 +0000 UTC m=+4342.474662969" watchObservedRunningTime="2026-01-31 06:51:16.384803197 +0000 UTC m=+4342.478685038" Jan 31 06:51:40 crc kubenswrapper[4712]: I0131 06:51:40.592677 4712 generic.go:334] "Generic (PLEG): container finished" podID="4607702c-b208-4750-8fe0-878c5d005768" containerID="386fc86e951c776271686a15c51d95d511762f23af0aa4dd1ff2d8fd922db08f" exitCode=0 Jan 31 06:51:40 crc kubenswrapper[4712]: I0131 06:51:40.592756 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" event={"ID":"4607702c-b208-4750-8fe0-878c5d005768","Type":"ContainerDied","Data":"386fc86e951c776271686a15c51d95d511762f23af0aa4dd1ff2d8fd922db08f"} Jan 31 06:51:41 crc kubenswrapper[4712]: I0131 06:51:41.713735 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:41 crc kubenswrapper[4712]: I0131 06:51:41.750002 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-f5lzb/crc-debug-vfgrv"] Jan 31 06:51:41 crc kubenswrapper[4712]: I0131 06:51:41.760257 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-f5lzb/crc-debug-vfgrv"] Jan 31 06:51:41 crc kubenswrapper[4712]: I0131 06:51:41.859514 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5l28\" (UniqueName: \"kubernetes.io/projected/4607702c-b208-4750-8fe0-878c5d005768-kube-api-access-c5l28\") pod \"4607702c-b208-4750-8fe0-878c5d005768\" (UID: \"4607702c-b208-4750-8fe0-878c5d005768\") " Jan 31 06:51:41 crc kubenswrapper[4712]: I0131 06:51:41.859700 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4607702c-b208-4750-8fe0-878c5d005768-host\") pod \"4607702c-b208-4750-8fe0-878c5d005768\" (UID: \"4607702c-b208-4750-8fe0-878c5d005768\") " Jan 31 06:51:41 crc kubenswrapper[4712]: I0131 06:51:41.859814 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4607702c-b208-4750-8fe0-878c5d005768-host" (OuterVolumeSpecName: "host") pod "4607702c-b208-4750-8fe0-878c5d005768" (UID: "4607702c-b208-4750-8fe0-878c5d005768"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 06:51:41 crc kubenswrapper[4712]: I0131 06:51:41.860340 4712 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4607702c-b208-4750-8fe0-878c5d005768-host\") on node \"crc\" DevicePath \"\"" Jan 31 06:51:41 crc kubenswrapper[4712]: I0131 06:51:41.867432 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4607702c-b208-4750-8fe0-878c5d005768-kube-api-access-c5l28" (OuterVolumeSpecName: "kube-api-access-c5l28") pod "4607702c-b208-4750-8fe0-878c5d005768" (UID: "4607702c-b208-4750-8fe0-878c5d005768"). InnerVolumeSpecName "kube-api-access-c5l28". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:51:41 crc kubenswrapper[4712]: I0131 06:51:41.961979 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5l28\" (UniqueName: \"kubernetes.io/projected/4607702c-b208-4750-8fe0-878c5d005768-kube-api-access-c5l28\") on node \"crc\" DevicePath \"\"" Jan 31 06:51:42 crc kubenswrapper[4712]: I0131 06:51:42.515434 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4607702c-b208-4750-8fe0-878c5d005768" path="/var/lib/kubelet/pods/4607702c-b208-4750-8fe0-878c5d005768/volumes" Jan 31 06:51:42 crc kubenswrapper[4712]: I0131 06:51:42.614875 4712 scope.go:117] "RemoveContainer" containerID="386fc86e951c776271686a15c51d95d511762f23af0aa4dd1ff2d8fd922db08f" Jan 31 06:51:42 crc kubenswrapper[4712]: I0131 06:51:42.614968 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f5lzb/crc-debug-vfgrv" Jan 31 06:51:42 crc kubenswrapper[4712]: I0131 06:51:42.959850 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-f5lzb/crc-debug-klkx7"] Jan 31 06:51:42 crc kubenswrapper[4712]: E0131 06:51:42.960645 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4607702c-b208-4750-8fe0-878c5d005768" containerName="container-00" Jan 31 06:51:42 crc kubenswrapper[4712]: I0131 06:51:42.960662 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="4607702c-b208-4750-8fe0-878c5d005768" containerName="container-00" Jan 31 06:51:42 crc kubenswrapper[4712]: I0131 06:51:42.960917 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="4607702c-b208-4750-8fe0-878c5d005768" containerName="container-00" Jan 31 06:51:42 crc kubenswrapper[4712]: I0131 06:51:42.961556 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.085322 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffr7d\" (UniqueName: \"kubernetes.io/projected/9184ec7a-9608-4c64-b30e-04cc501cf7ba-kube-api-access-ffr7d\") pod \"crc-debug-klkx7\" (UID: \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\") " pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.085529 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9184ec7a-9608-4c64-b30e-04cc501cf7ba-host\") pod \"crc-debug-klkx7\" (UID: \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\") " pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.187386 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9184ec7a-9608-4c64-b30e-04cc501cf7ba-host\") pod \"crc-debug-klkx7\" (UID: \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\") " pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.187516 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffr7d\" (UniqueName: \"kubernetes.io/projected/9184ec7a-9608-4c64-b30e-04cc501cf7ba-kube-api-access-ffr7d\") pod \"crc-debug-klkx7\" (UID: \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\") " pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.187622 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9184ec7a-9608-4c64-b30e-04cc501cf7ba-host\") pod \"crc-debug-klkx7\" (UID: \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\") " pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.208703 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffr7d\" (UniqueName: \"kubernetes.io/projected/9184ec7a-9608-4c64-b30e-04cc501cf7ba-kube-api-access-ffr7d\") pod \"crc-debug-klkx7\" (UID: \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\") " pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.278876 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.629830 4712 generic.go:334] "Generic (PLEG): container finished" podID="9184ec7a-9608-4c64-b30e-04cc501cf7ba" containerID="d02a9a0e83d8227d95967f31fec12ba4d8ecf5984154b2c40d425401ed39ccf3" exitCode=1 Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.630107 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f5lzb/crc-debug-klkx7" event={"ID":"9184ec7a-9608-4c64-b30e-04cc501cf7ba","Type":"ContainerDied","Data":"d02a9a0e83d8227d95967f31fec12ba4d8ecf5984154b2c40d425401ed39ccf3"} Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.630334 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f5lzb/crc-debug-klkx7" event={"ID":"9184ec7a-9608-4c64-b30e-04cc501cf7ba","Type":"ContainerStarted","Data":"edf0dae79f162a33faaa65b3b61f0f42c8b6ea69a74269c1ee6b978101b2f036"} Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.672493 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-f5lzb/crc-debug-klkx7"] Jan 31 06:51:43 crc kubenswrapper[4712]: I0131 06:51:43.681397 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-f5lzb/crc-debug-klkx7"] Jan 31 06:51:44 crc kubenswrapper[4712]: I0131 06:51:44.759389 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:44 crc kubenswrapper[4712]: I0131 06:51:44.928527 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffr7d\" (UniqueName: \"kubernetes.io/projected/9184ec7a-9608-4c64-b30e-04cc501cf7ba-kube-api-access-ffr7d\") pod \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\" (UID: \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\") " Jan 31 06:51:44 crc kubenswrapper[4712]: I0131 06:51:44.928606 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9184ec7a-9608-4c64-b30e-04cc501cf7ba-host\") pod \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\" (UID: \"9184ec7a-9608-4c64-b30e-04cc501cf7ba\") " Jan 31 06:51:44 crc kubenswrapper[4712]: I0131 06:51:44.928828 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9184ec7a-9608-4c64-b30e-04cc501cf7ba-host" (OuterVolumeSpecName: "host") pod "9184ec7a-9608-4c64-b30e-04cc501cf7ba" (UID: "9184ec7a-9608-4c64-b30e-04cc501cf7ba"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 06:51:44 crc kubenswrapper[4712]: I0131 06:51:44.929353 4712 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9184ec7a-9608-4c64-b30e-04cc501cf7ba-host\") on node \"crc\" DevicePath \"\"" Jan 31 06:51:44 crc kubenswrapper[4712]: I0131 06:51:44.937945 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9184ec7a-9608-4c64-b30e-04cc501cf7ba-kube-api-access-ffr7d" (OuterVolumeSpecName: "kube-api-access-ffr7d") pod "9184ec7a-9608-4c64-b30e-04cc501cf7ba" (UID: "9184ec7a-9608-4c64-b30e-04cc501cf7ba"). InnerVolumeSpecName "kube-api-access-ffr7d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:51:45 crc kubenswrapper[4712]: I0131 06:51:45.032088 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffr7d\" (UniqueName: \"kubernetes.io/projected/9184ec7a-9608-4c64-b30e-04cc501cf7ba-kube-api-access-ffr7d\") on node \"crc\" DevicePath \"\"" Jan 31 06:51:45 crc kubenswrapper[4712]: I0131 06:51:45.663000 4712 scope.go:117] "RemoveContainer" containerID="d02a9a0e83d8227d95967f31fec12ba4d8ecf5984154b2c40d425401ed39ccf3" Jan 31 06:51:45 crc kubenswrapper[4712]: I0131 06:51:45.663528 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f5lzb/crc-debug-klkx7" Jan 31 06:51:46 crc kubenswrapper[4712]: I0131 06:51:46.516061 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9184ec7a-9608-4c64-b30e-04cc501cf7ba" path="/var/lib/kubelet/pods/9184ec7a-9608-4c64-b30e-04cc501cf7ba/volumes" Jan 31 06:52:23 crc kubenswrapper[4712]: I0131 06:52:23.714927 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ff848578-hzzpm_913d1fd5-e3ce-4632-abda-a7161638d494/barbican-api/0.log" Jan 31 06:52:23 crc kubenswrapper[4712]: I0131 06:52:23.865874 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ff848578-hzzpm_913d1fd5-e3ce-4632-abda-a7161638d494/barbican-api-log/0.log" Jan 31 06:52:23 crc kubenswrapper[4712]: I0131 06:52:23.971703 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-77f65dc848-97sk5_8bac1193-72ba-4208-9e62-9eae63196d1c/barbican-keystone-listener/0.log" Jan 31 06:52:24 crc kubenswrapper[4712]: I0131 06:52:24.035069 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-77f65dc848-97sk5_8bac1193-72ba-4208-9e62-9eae63196d1c/barbican-keystone-listener-log/0.log" Jan 31 06:52:24 crc kubenswrapper[4712]: I0131 06:52:24.189550 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5f578d49bf-ffhtj_872694e2-8988-499c-b05b-3597e7d4e327/barbican-worker/0.log" Jan 31 06:52:24 crc kubenswrapper[4712]: I0131 06:52:24.219602 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5f578d49bf-ffhtj_872694e2-8988-499c-b05b-3597e7d4e327/barbican-worker-log/0.log" Jan 31 06:52:24 crc kubenswrapper[4712]: I0131 06:52:24.490139 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_9e9e028b-835f-498c-a16a-88a444ee2739/ceilometer-central-agent/0.log" Jan 31 06:52:24 crc kubenswrapper[4712]: I0131 06:52:24.497540 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-d9662_d61c9236-0514-4d46-b7a7-49f8d5e63685/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:24 crc kubenswrapper[4712]: I0131 06:52:24.582161 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_9e9e028b-835f-498c-a16a-88a444ee2739/ceilometer-notification-agent/0.log" Jan 31 06:52:24 crc kubenswrapper[4712]: I0131 06:52:24.686733 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_9e9e028b-835f-498c-a16a-88a444ee2739/proxy-httpd/0.log" Jan 31 06:52:24 crc kubenswrapper[4712]: I0131 06:52:24.698426 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_9e9e028b-835f-498c-a16a-88a444ee2739/sg-core/0.log" Jan 31 06:52:24 crc kubenswrapper[4712]: 
I0131 06:52:24.973228 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f1761086-d010-4dcc-91fb-a9503805de81/cinder-api-log/0.log" Jan 31 06:52:25 crc kubenswrapper[4712]: I0131 06:52:25.031941 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f1761086-d010-4dcc-91fb-a9503805de81/cinder-api/0.log" Jan 31 06:52:25 crc kubenswrapper[4712]: I0131 06:52:25.095756 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2118bcf2-8afd-4e35-b53b-6998f5c6a5cc/cinder-scheduler/0.log" Jan 31 06:52:25 crc kubenswrapper[4712]: I0131 06:52:25.249829 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2118bcf2-8afd-4e35-b53b-6998f5c6a5cc/probe/0.log" Jan 31 06:52:25 crc kubenswrapper[4712]: I0131 06:52:25.350279 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6_033a2ac5-1d6e-4c75-9792-d54b4da7ef85/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:25 crc kubenswrapper[4712]: I0131 06:52:25.519076 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp_8a24ba49-5360-4c66-a06d-36f6915384a9/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:25 crc kubenswrapper[4712]: I0131 06:52:25.601514 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-595f5c6cbf-z979m_848e784a-8596-4d55-bb70-f4a99fd14873/init/0.log" Jan 31 06:52:25 crc kubenswrapper[4712]: I0131 06:52:25.868223 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-595f5c6cbf-z979m_848e784a-8596-4d55-bb70-f4a99fd14873/dnsmasq-dns/0.log" Jan 31 06:52:25 crc kubenswrapper[4712]: I0131 06:52:25.879111 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c_d52a5e5f-6195-4acc-b30b-c872b19bbd10/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:25 crc kubenswrapper[4712]: I0131 06:52:25.885052 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-595f5c6cbf-z979m_848e784a-8596-4d55-bb70-f4a99fd14873/init/0.log" Jan 31 06:52:26 crc kubenswrapper[4712]: I0131 06:52:26.067950 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b83454be-e489-429c-a4b3-8914ee18daa4/glance-httpd/0.log" Jan 31 06:52:26 crc kubenswrapper[4712]: I0131 06:52:26.093613 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b83454be-e489-429c-a4b3-8914ee18daa4/glance-log/0.log" Jan 31 06:52:26 crc kubenswrapper[4712]: I0131 06:52:26.299094 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_baae8e5b-9153-449a-92f2-34eb6cb7dbd3/glance-httpd/0.log" Jan 31 06:52:26 crc kubenswrapper[4712]: I0131 06:52:26.343031 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_baae8e5b-9153-449a-92f2-34eb6cb7dbd3/glance-log/0.log" Jan 31 06:52:26 crc kubenswrapper[4712]: I0131 06:52:26.419840 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-t9596_1b9eefb9-d787-4042-b8cf-b1d7160c09a4/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:26 crc kubenswrapper[4712]: I0131 
06:52:26.872627 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-f6t8j_a7836756-e240-4f18-b3cc-f820d8dd026d/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:27 crc kubenswrapper[4712]: I0131 06:52:27.001953 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-76bf8584d5-c59jx_623c2dc0-c837-436c-ade9-19c8a8fedfb6/keystone-api/0.log" Jan 31 06:52:27 crc kubenswrapper[4712]: I0131 06:52:27.105241 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_2f56d276-e653-4e6b-b5a7-e530babf7175/kube-state-metrics/0.log" Jan 31 06:52:27 crc kubenswrapper[4712]: I0131 06:52:27.598692 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-x74g8_1e610581-bd59-418a-901a-7a37acc85442/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:27 crc kubenswrapper[4712]: I0131 06:52:27.616038 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_5c874b7f-26e1-436e-9cdc-a440a86b72ec/memcached/0.log" Jan 31 06:52:27 crc kubenswrapper[4712]: I0131 06:52:27.882218 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6445df85d9-7dknt_2b8f18ba-9096-4d08-9d1f-4efed6b7883a/neutron-api/0.log" Jan 31 06:52:27 crc kubenswrapper[4712]: I0131 06:52:27.909132 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb_104962fa-0d0e-40b4-aacc-94ae160c761d/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:27 crc kubenswrapper[4712]: I0131 06:52:27.924987 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6445df85d9-7dknt_2b8f18ba-9096-4d08-9d1f-4efed6b7883a/neutron-httpd/0.log" Jan 31 06:52:28 crc kubenswrapper[4712]: I0131 06:52:28.363475 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_4c060068-c993-4028-8639-64e08eb08bd4/nova-cell0-conductor-conductor/0.log" Jan 31 06:52:28 crc kubenswrapper[4712]: I0131 06:52:28.558210 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e2c19455-9dc4-45de-afd3-d55b91c729c6/nova-api-log/0.log" Jan 31 06:52:28 crc kubenswrapper[4712]: I0131 06:52:28.578924 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7/nova-cell1-conductor-conductor/0.log" Jan 31 06:52:28 crc kubenswrapper[4712]: I0131 06:52:28.717051 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_ab75dbe3-c922-4440-b310-6fe0d2201274/nova-cell1-novncproxy-novncproxy/0.log" Jan 31 06:52:28 crc kubenswrapper[4712]: I0131 06:52:28.808574 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e2c19455-9dc4-45de-afd3-d55b91c729c6/nova-api-api/0.log" Jan 31 06:52:28 crc kubenswrapper[4712]: I0131 06:52:28.865602 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-9fpgv_293b2cc8-393a-4043-ac70-89b0a519de4b/nova-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:29 crc kubenswrapper[4712]: I0131 06:52:29.067033 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c032ae9d-cfbd-4184-8abb-5ccb6e158a0c/nova-metadata-log/0.log" Jan 31 06:52:29 crc kubenswrapper[4712]: I0131 06:52:29.386826 4712 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9/nova-scheduler-scheduler/0.log" Jan 31 06:52:29 crc kubenswrapper[4712]: I0131 06:52:29.416218 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9029816c-730b-4d38-9464-1d0ed936fc10/mysql-bootstrap/0.log" Jan 31 06:52:29 crc kubenswrapper[4712]: I0131 06:52:29.651658 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9029816c-730b-4d38-9464-1d0ed936fc10/mysql-bootstrap/0.log" Jan 31 06:52:29 crc kubenswrapper[4712]: I0131 06:52:29.679059 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9029816c-730b-4d38-9464-1d0ed936fc10/galera/0.log" Jan 31 06:52:29 crc kubenswrapper[4712]: I0131 06:52:29.716618 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c54792d1-2de9-4c85-a843-35d4b14dd8e4/mysql-bootstrap/0.log" Jan 31 06:52:29 crc kubenswrapper[4712]: I0131 06:52:29.933550 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c54792d1-2de9-4c85-a843-35d4b14dd8e4/mysql-bootstrap/0.log" Jan 31 06:52:29 crc kubenswrapper[4712]: I0131 06:52:29.979065 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_5403021d-e6d4-4e4e-aa8e-8879f65f9f36/openstackclient/0.log" Jan 31 06:52:29 crc kubenswrapper[4712]: I0131 06:52:29.979215 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c54792d1-2de9-4c85-a843-35d4b14dd8e4/galera/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.204217 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c032ae9d-cfbd-4184-8abb-5ccb6e158a0c/nova-metadata-metadata/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.229730 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-wxrfr_cdd7091c-c446-44de-a591-89bcd4901347/openstack-network-exporter/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.312046 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xzbjj_222305d6-dde8-43bd-801c-7420d0a05add/ovsdb-server-init/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.488666 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xzbjj_222305d6-dde8-43bd-801c-7420d0a05add/ovsdb-server-init/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.508490 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xzbjj_222305d6-dde8-43bd-801c-7420d0a05add/ovs-vswitchd/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.514427 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xzbjj_222305d6-dde8-43bd-801c-7420d0a05add/ovsdb-server/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.558951 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-xbh95_16fc3ee9-9ecc-45b7-8410-d9a6b2da5863/ovn-controller/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.786335 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_2971f42d-daa7-474b-8d5e-06f5d943d091/openstack-network-exporter/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.817612 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-2lsmd_66f934d4-4354-4d23-80e4-0fd0b6facf41/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:30 crc kubenswrapper[4712]: I0131 06:52:30.818067 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_2971f42d-daa7-474b-8d5e-06f5d943d091/ovn-northd/0.log" Jan 31 06:52:31 crc kubenswrapper[4712]: I0131 06:52:31.462159 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3eb28efb-abfd-4570-8282-f0189e523fa3/openstack-network-exporter/0.log" Jan 31 06:52:31 crc kubenswrapper[4712]: I0131 06:52:31.490952 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3eb28efb-abfd-4570-8282-f0189e523fa3/ovsdbserver-nb/0.log" Jan 31 06:52:31 crc kubenswrapper[4712]: I0131 06:52:31.606327 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_58cd6591-0ba3-4102-b8de-79b3c7d77f8e/openstack-network-exporter/0.log" Jan 31 06:52:31 crc kubenswrapper[4712]: I0131 06:52:31.720829 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_58cd6591-0ba3-4102-b8de-79b3c7d77f8e/ovsdbserver-sb/0.log" Jan 31 06:52:31 crc kubenswrapper[4712]: I0131 06:52:31.914940 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-778dc5b584-k7lvt_c1366fba-a1ea-415f-8b63-43648b6b16fb/placement-log/0.log" Jan 31 06:52:31 crc kubenswrapper[4712]: I0131 06:52:31.927657 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-778dc5b584-k7lvt_c1366fba-a1ea-415f-8b63-43648b6b16fb/placement-api/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.011994 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_dc6391b1-c3f6-4ae8-ad8f-00572ac27b87/setup-container/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.242480 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_dc6391b1-c3f6-4ae8-ad8f-00572ac27b87/rabbitmq/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.272684 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_dc6391b1-c3f6-4ae8-ad8f-00572ac27b87/setup-container/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.279285 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_57a64e6d-ff8a-480a-aa16-563b5b127e6f/setup-container/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.464529 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_57a64e6d-ff8a-480a-aa16-563b5b127e6f/rabbitmq/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.485930 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_57a64e6d-ff8a-480a-aa16-563b5b127e6f/setup-container/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.535317 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm_8e2c8610-b420-4018-b0d3-62afdc779dba/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.729003 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-drvv8_76c5b162-1ced-457b-90f1-fbf85edf746d/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:32 crc 
kubenswrapper[4712]: I0131 06:52:32.792250 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl_b7519335-f8e2-4211-8b99-a9fc3ac51150/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.823666 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-w4v27_ef660f01-216f-4f2e-89b8-55e0fb24c506/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:32 crc kubenswrapper[4712]: I0131 06:52:32.934693 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-cd9mf_d0cbadc8-9972-41d6-9313-0337cb84f72d/ssh-known-hosts-edpm-deployment/0.log" Jan 31 06:52:33 crc kubenswrapper[4712]: I0131 06:52:33.645888 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8557fb7df9-bhzkt_2a8507c3-4b91-4b81-83ba-4bb63b3745f0/proxy-server/0.log" Jan 31 06:52:33 crc kubenswrapper[4712]: I0131 06:52:33.741833 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8557fb7df9-bhzkt_2a8507c3-4b91-4b81-83ba-4bb63b3745f0/proxy-httpd/0.log" Jan 31 06:52:33 crc kubenswrapper[4712]: I0131 06:52:33.758920 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-xnfzc_108e0e4f-9137-4e8c-aec6-032c1585852c/swift-ring-rebalance/0.log" Jan 31 06:52:33 crc kubenswrapper[4712]: I0131 06:52:33.942155 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/account-auditor/0.log" Jan 31 06:52:33 crc kubenswrapper[4712]: I0131 06:52:33.951054 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/account-reaper/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.012290 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/account-server/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.055420 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/account-replicator/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.190061 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/container-auditor/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.199200 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/container-replicator/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.202837 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/container-server/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.273054 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-auditor/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.312615 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/container-updater/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.437396 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-replicator/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.456948 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-expirer/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.475794 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-server/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.562206 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-updater/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.576119 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/rsync/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.661726 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/swift-recon-cron/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.821324 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf_467fe339-07fd-4c51-95df-4c8c123e2c03/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:34 crc kubenswrapper[4712]: I0131 06:52:34.890902 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_0f27224a-407b-4803-afd6-9c1caa3fbfdf/tempest-tests-tempest-tests-runner/0.log" Jan 31 06:52:35 crc kubenswrapper[4712]: I0131 06:52:35.015132 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_acea85f9-50c3-4f55-8928-dbcf70e29709/test-operator-logs-container/0.log" Jan 31 06:52:35 crc kubenswrapper[4712]: I0131 06:52:35.119064 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-b462n_55c71843-8f9f-4f1b-904d-e05dc4a2ea25/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 06:52:42 crc kubenswrapper[4712]: I0131 06:52:42.497758 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 06:52:42 crc kubenswrapper[4712]: I0131 06:52:42.498315 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 06:53:00 crc kubenswrapper[4712]: I0131 06:53:00.548927 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/util/0.log" Jan 31 06:53:00 crc kubenswrapper[4712]: I0131 06:53:00.746440 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/util/0.log" Jan 31 06:53:00 crc kubenswrapper[4712]: I0131 
Jan 31 06:53:00 crc kubenswrapper[4712]: I0131 06:53:00.752881 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/pull/0.log"
Jan 31 06:53:00 crc kubenswrapper[4712]: I0131 06:53:00.758618 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/pull/0.log"
Jan 31 06:53:00 crc kubenswrapper[4712]: I0131 06:53:00.932866 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/pull/0.log"
Jan 31 06:53:00 crc kubenswrapper[4712]: I0131 06:53:00.953922 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/util/0.log"
Jan 31 06:53:00 crc kubenswrapper[4712]: I0131 06:53:00.968276 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/extract/0.log"
Jan 31 06:53:01 crc kubenswrapper[4712]: I0131 06:53:01.185474 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b6c4d8c5f-wz82s_e8223e38-a5ce-4f9f-9780-dea80a326f17/manager/0.log"
Jan 31 06:53:01 crc kubenswrapper[4712]: I0131 06:53:01.239942 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d874c8fc-74szb_c0a2fda3-cecc-40e9-b15e-2d95487c7373/manager/0.log"
Jan 31 06:53:01 crc kubenswrapper[4712]: I0131 06:53:01.411116 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d9697b7f4-vmvmz_e48bf123-5e17-4ef3-980f-92286c95bd85/manager/0.log"
Jan 31 06:53:01 crc kubenswrapper[4712]: I0131 06:53:01.520423 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8886f4c47-z96q2_b348e7d3-8e8d-484d-bd03-b27125c4fd58/manager/0.log"
Jan 31 06:53:01 crc kubenswrapper[4712]: I0131 06:53:01.663283 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69d6db494d-ktrfv_623f0661-5fd0-4c1c-94b8-7cb41dc60f5f/manager/0.log"
Jan 31 06:53:01 crc kubenswrapper[4712]: I0131 06:53:01.755342 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-7x6wg_f94c7f2e-7429-4be0-bad9-f3cdf0156ba9/manager/0.log"
Jan 31 06:53:02 crc kubenswrapper[4712]: I0131 06:53:02.039309 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5f4b8bd54d-9djc7_d7b48b1c-633e-4714-a9fe-0cdb81dc946d/manager/0.log"
Jan 31 06:53:02 crc kubenswrapper[4712]: I0131 06:53:02.108069 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-pdzn7_d0f0cc18-6437-4c23-8ebd-f0a234fc72ff/manager/0.log"
Jan 31 06:53:02 crc kubenswrapper[4712]: I0131 06:53:02.436591 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-84f48565d4-2zprf_b81b1954-214b-40b7-886d-3da110000383/manager/0.log"
Jan 31 06:53:02 crc kubenswrapper[4712]: I0131 06:53:02.478333 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7dd968899f-n9768_ddfcf8fb-8920-44fa-a439-ea5d5b6456f4/manager/0.log"
Jan 31 06:53:02 crc kubenswrapper[4712]: I0131 06:53:02.636843 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-czjfw_2a9eefe0-c80b-479c-a630-4b94bea52b20/manager/0.log"
Jan 31 06:53:02 crc kubenswrapper[4712]: I0131 06:53:02.767230 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-585dbc889-5vrvb_f7c522e9-e789-4fa5-9736-b6d921eba9e5/manager/0.log"
Jan 31 06:53:02 crc kubenswrapper[4712]: I0131 06:53:02.900007 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-55bff696bd-qwh9v_46eafe76-b842-4889-98b5-eae45c6c9a70/manager/0.log"
Jan 31 06:53:02 crc kubenswrapper[4712]: I0131 06:53:02.977422 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6687f8d877-f9sm2_76f477c9-248d-45e0-acdc-098fd960378c/manager/0.log"
Jan 31 06:53:03 crc kubenswrapper[4712]: I0131 06:53:03.089955 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-86dfb79cc789484_cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd/manager/0.log"
Jan 31 06:53:03 crc kubenswrapper[4712]: I0131 06:53:03.352724 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-757f46c65d-qrpzj_23a3b35d-3b93-4e18-b4af-665b780f3580/operator/0.log"
Jan 31 06:53:03 crc kubenswrapper[4712]: I0131 06:53:03.697433 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-zzwgm_f74d2a07-f376-41b5-b5c3-9305ad3a03fb/registry-server/0.log"
Jan 31 06:53:03 crc kubenswrapper[4712]: I0131 06:53:03.794884 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-2fg8n_0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a/manager/0.log"
Jan 31 06:53:04 crc kubenswrapper[4712]: I0131 06:53:04.361421 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-pfsg7_df910b52-e35d-4099-abe9-676b2863ee90/manager/0.log"
Jan 31 06:53:04 crc kubenswrapper[4712]: I0131 06:53:04.556500 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-nr98t_4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a/operator/0.log"
Jan 31 06:53:04 crc kubenswrapper[4712]: I0131 06:53:04.748084 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68fc8c869-kcq2k_ab883058-7fba-4506-8493-a1c290b67a44/manager/0.log"
Jan 31 06:53:04 crc kubenswrapper[4712]: I0131 06:53:04.972675 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-64b5b76f97-msxfl_a0b07b7e-8267-4062-8cf1-9319d4258d13/manager/0.log"
Jan 31 06:53:04 crc kubenswrapper[4712]: I0131 06:53:04.997412 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-2wkdr_6d0bc1fd-d786-402b-a7b0-4f31066900f9/manager/0.log"
Jan 31 06:53:05 crc kubenswrapper[4712]: I0131 06:53:05.156813 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-p7ht5_43cff116-70bd-4e43-a6a2-cffaebca6f11/manager/0.log"
Jan 31 06:53:05 crc kubenswrapper[4712]: I0131 06:53:05.200545 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6b6f655c79-fdkzc_84f27435-bc45-4501-8dda-59f399689054/manager/0.log"
Jan 31 06:53:12 crc kubenswrapper[4712]: I0131 06:53:12.497218 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 31 06:53:12 crc kubenswrapper[4712]: I0131 06:53:12.497566 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 31 06:53:25 crc kubenswrapper[4712]: I0131 06:53:25.568571 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-qdzvl_dc0eb91d-cc30-4ef8-aa0b-be90744ba313/control-plane-machine-set-operator/0.log"
Jan 31 06:53:26 crc kubenswrapper[4712]: I0131 06:53:26.149766 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-h8q6s_71d0663f-6a96-43ff-91fe-25bf58eb996e/machine-api-operator/0.log"
Jan 31 06:53:26 crc kubenswrapper[4712]: I0131 06:53:26.177009 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-h8q6s_71d0663f-6a96-43ff-91fe-25bf58eb996e/kube-rbac-proxy/0.log"
Jan 31 06:53:39 crc kubenswrapper[4712]: I0131 06:53:39.821805 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-m4x85_8a4a4023-5949-4fb1-b75d-375705a6ccd5/cert-manager-cainjector/0.log"
Jan 31 06:53:39 crc kubenswrapper[4712]: I0131 06:53:39.858827 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-rzlv8_d6da4569-9d12-457b-b448-2a96889fd6d0/cert-manager-controller/0.log"
Jan 31 06:53:40 crc kubenswrapper[4712]: I0131 06:53:40.030415 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-cq69c_7a3564dd-edd4-4e23-b910-084279771f4a/cert-manager-webhook/0.log"
Jan 31 06:53:42 crc kubenswrapper[4712]: I0131 06:53:42.497220 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 31 06:53:42 crc kubenswrapper[4712]: I0131 06:53:42.497500 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 31 06:53:42 crc kubenswrapper[4712]: I0131 06:53:42.497553 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd"
Jan 31 06:53:42 crc kubenswrapper[4712]: I0131 06:53:42.498314 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 31 06:53:42 crc kubenswrapper[4712]: I0131 06:53:42.498369 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" gracePeriod=600
Jan 31 06:53:42 crc kubenswrapper[4712]: E0131 06:53:42.625720 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:53:42 crc kubenswrapper[4712]: I0131 06:53:42.872844 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" exitCode=0
Jan 31 06:53:42 crc kubenswrapper[4712]: I0131 06:53:42.872894 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2"}
Jan 31 06:53:42 crc kubenswrapper[4712]: I0131 06:53:42.872937 4712 scope.go:117] "RemoveContainer" containerID="952f6981a91ea744be3bbdbee415ef3735f560f14d83a3f0c53c078852581171"
Jan 31 06:53:42 crc kubenswrapper[4712]: I0131 06:53:42.873695 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2"
Jan 31 06:53:42 crc kubenswrapper[4712]: E0131 06:53:42.874000 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 06:53:52 crc kubenswrapper[4712]: I0131 06:53:52.123450 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-pjjc5_59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4/nmstate-console-plugin/0.log"
Jan 31 06:53:52 crc kubenswrapper[4712]: I0131 06:53:52.312855 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-44b4c_f53db0c4-e85d-4db9-b1c6-12bb90a2d886/nmstate-handler/0.log"
Jan 31 06:53:52 crc kubenswrapper[4712]: I0131 06:53:52.399667 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-nnmp4_f5cd590c-d8eb-429e-8023-0a0b981d2437/kube-rbac-proxy/0.log"
path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-nnmp4_f5cd590c-d8eb-429e-8023-0a0b981d2437/kube-rbac-proxy/0.log" Jan 31 06:53:52 crc kubenswrapper[4712]: I0131 06:53:52.415325 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-nnmp4_f5cd590c-d8eb-429e-8023-0a0b981d2437/nmstate-metrics/0.log" Jan 31 06:53:52 crc kubenswrapper[4712]: I0131 06:53:52.529820 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-tp5m4_9f81c039-533b-498a-8958-8b217806c189/nmstate-operator/0.log" Jan 31 06:53:52 crc kubenswrapper[4712]: I0131 06:53:52.645372 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-ftd75_737942ad-fc1c-4559-a40a-e772801f3da4/nmstate-webhook/0.log" Jan 31 06:53:57 crc kubenswrapper[4712]: I0131 06:53:57.504911 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:53:57 crc kubenswrapper[4712]: E0131 06:53:57.505979 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:54:10 crc kubenswrapper[4712]: I0131 06:54:10.504586 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:54:10 crc kubenswrapper[4712]: E0131 06:54:10.505641 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:54:20 crc kubenswrapper[4712]: I0131 06:54:20.684524 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-pwnsk_693f3580-e3b0-4892-a4be-1be046ccd732/kube-rbac-proxy/0.log" Jan 31 06:54:20 crc kubenswrapper[4712]: I0131 06:54:20.828442 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-pwnsk_693f3580-e3b0-4892-a4be-1be046ccd732/controller/0.log" Jan 31 06:54:20 crc kubenswrapper[4712]: I0131 06:54:20.915402 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-frr-files/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.466375 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-reloader/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.503737 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-reloader/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.511809 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-frr-files/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 
06:54:21.522226 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-metrics/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.682909 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-metrics/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.704625 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-frr-files/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.729777 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-metrics/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.734326 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-reloader/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.960144 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-frr-files/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.968324 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-metrics/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.972057 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-reloader/0.log" Jan 31 06:54:21 crc kubenswrapper[4712]: I0131 06:54:21.980256 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/controller/0.log" Jan 31 06:54:22 crc kubenswrapper[4712]: I0131 06:54:22.178056 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/kube-rbac-proxy/0.log" Jan 31 06:54:22 crc kubenswrapper[4712]: I0131 06:54:22.190719 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/frr-metrics/0.log" Jan 31 06:54:22 crc kubenswrapper[4712]: I0131 06:54:22.191888 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/kube-rbac-proxy-frr/0.log" Jan 31 06:54:22 crc kubenswrapper[4712]: I0131 06:54:22.397606 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-79qrg_4171e791-18bd-4302-933e-e49a8ad12e63/frr-k8s-webhook-server/0.log" Jan 31 06:54:22 crc kubenswrapper[4712]: I0131 06:54:22.418629 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/reloader/0.log" Jan 31 06:54:22 crc kubenswrapper[4712]: I0131 06:54:22.671080 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-f5c7f4d69-nvgqw_28a2f231-88ed-4f0d-941b-aa351dcabfd8/manager/0.log" Jan 31 06:54:22 crc kubenswrapper[4712]: I0131 06:54:22.870356 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6d4f9bc9b4-stlsv_0237140f-4dc3-42f6-8621-f96e8732af5e/webhook-server/0.log" Jan 31 06:54:22 crc kubenswrapper[4712]: I0131 06:54:22.910311 4712 log.go:25] "Finished 
parsing log file" path="/var/log/pods/metallb-system_speaker-j9fwd_5a9228a1-741f-49a7-8e70-5f2079f89755/kube-rbac-proxy/0.log" Jan 31 06:54:23 crc kubenswrapper[4712]: I0131 06:54:23.508647 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:54:23 crc kubenswrapper[4712]: E0131 06:54:23.508947 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:54:23 crc kubenswrapper[4712]: I0131 06:54:23.614824 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-j9fwd_5a9228a1-741f-49a7-8e70-5f2079f89755/speaker/0.log" Jan 31 06:54:23 crc kubenswrapper[4712]: I0131 06:54:23.915806 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/frr/0.log" Jan 31 06:54:34 crc kubenswrapper[4712]: I0131 06:54:34.512288 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:54:34 crc kubenswrapper[4712]: E0131 06:54:34.513602 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:54:38 crc kubenswrapper[4712]: I0131 06:54:38.124244 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/util/0.log" Jan 31 06:54:38 crc kubenswrapper[4712]: I0131 06:54:38.567323 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/pull/0.log" Jan 31 06:54:38 crc kubenswrapper[4712]: I0131 06:54:38.571117 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/pull/0.log" Jan 31 06:54:38 crc kubenswrapper[4712]: I0131 06:54:38.620130 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/util/0.log" Jan 31 06:54:38 crc kubenswrapper[4712]: I0131 06:54:38.836520 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/pull/0.log" Jan 31 06:54:38 crc kubenswrapper[4712]: I0131 06:54:38.874264 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/util/0.log" Jan 31 06:54:38 crc kubenswrapper[4712]: I0131 06:54:38.875243 4712 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/extract/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.028788 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/util/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.231774 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/util/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.233658 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/pull/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.236640 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/pull/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.437012 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/util/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.438339 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/pull/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.457971 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/extract/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.648951 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-utilities/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.836473 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-utilities/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.864860 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-content/0.log" Jan 31 06:54:39 crc kubenswrapper[4712]: I0131 06:54:39.895050 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-content/0.log" Jan 31 06:54:40 crc kubenswrapper[4712]: I0131 06:54:40.047821 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-content/0.log" Jan 31 06:54:40 crc kubenswrapper[4712]: I0131 06:54:40.070770 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-utilities/0.log" Jan 31 06:54:40 crc kubenswrapper[4712]: I0131 06:54:40.404679 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-utilities/0.log" Jan 31 06:54:40 crc kubenswrapper[4712]: I0131 06:54:40.610724 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-content/0.log" Jan 31 06:54:40 crc kubenswrapper[4712]: I0131 06:54:40.663715 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-utilities/0.log" Jan 31 06:54:40 crc kubenswrapper[4712]: I0131 06:54:40.742328 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/registry-server/0.log" Jan 31 06:54:40 crc kubenswrapper[4712]: I0131 06:54:40.820726 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-content/0.log" Jan 31 06:54:40 crc kubenswrapper[4712]: I0131 06:54:40.929098 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-utilities/0.log" Jan 31 06:54:40 crc kubenswrapper[4712]: I0131 06:54:40.953918 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-content/0.log" Jan 31 06:54:41 crc kubenswrapper[4712]: I0131 06:54:41.273449 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-jp69c_d31cb490-ecb9-4f62-8633-a6239f98d3a2/marketplace-operator/0.log" Jan 31 06:54:41 crc kubenswrapper[4712]: I0131 06:54:41.390976 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-utilities/0.log" Jan 31 06:54:41 crc kubenswrapper[4712]: I0131 06:54:41.502429 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/registry-server/0.log" Jan 31 06:54:41 crc kubenswrapper[4712]: I0131 06:54:41.625130 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-content/0.log" Jan 31 06:54:41 crc kubenswrapper[4712]: I0131 06:54:41.658105 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-content/0.log" Jan 31 06:54:41 crc kubenswrapper[4712]: I0131 06:54:41.672209 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-utilities/0.log" Jan 31 06:54:41 crc kubenswrapper[4712]: I0131 06:54:41.831005 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-utilities/0.log" Jan 31 06:54:41 crc kubenswrapper[4712]: I0131 06:54:41.870713 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-content/0.log" Jan 31 06:54:41 crc kubenswrapper[4712]: I0131 06:54:41.991873 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-utilities/0.log" Jan 31 06:54:42 crc kubenswrapper[4712]: I0131 06:54:42.039030 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/registry-server/0.log" Jan 31 06:54:42 crc kubenswrapper[4712]: I0131 06:54:42.201281 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-content/0.log" Jan 31 06:54:42 crc kubenswrapper[4712]: I0131 06:54:42.212444 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-utilities/0.log" Jan 31 06:54:42 crc kubenswrapper[4712]: I0131 06:54:42.231722 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-content/0.log" Jan 31 06:54:42 crc kubenswrapper[4712]: I0131 06:54:42.404955 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-utilities/0.log" Jan 31 06:54:42 crc kubenswrapper[4712]: I0131 06:54:42.436762 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-content/0.log" Jan 31 06:54:43 crc kubenswrapper[4712]: I0131 06:54:43.143319 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/registry-server/0.log" Jan 31 06:54:46 crc kubenswrapper[4712]: I0131 06:54:46.503909 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:54:46 crc kubenswrapper[4712]: E0131 06:54:46.504748 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:54:57 crc kubenswrapper[4712]: I0131 06:54:57.504008 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:54:57 crc kubenswrapper[4712]: E0131 06:54:57.504762 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:54:59 crc kubenswrapper[4712]: E0131 06:54:59.597210 4712 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.38:47190->38.102.83.38:44285: write tcp 38.102.83.38:47190->38.102.83.38:44285: write: broken pipe Jan 31 06:55:11 crc kubenswrapper[4712]: I0131 06:55:11.504745 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:55:11 crc 
kubenswrapper[4712]: E0131 06:55:11.505960 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:55:25 crc kubenswrapper[4712]: I0131 06:55:25.504500 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:55:25 crc kubenswrapper[4712]: E0131 06:55:25.505437 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:55:38 crc kubenswrapper[4712]: I0131 06:55:38.504786 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:55:38 crc kubenswrapper[4712]: E0131 06:55:38.505687 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:55:52 crc kubenswrapper[4712]: I0131 06:55:52.504372 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:55:52 crc kubenswrapper[4712]: E0131 06:55:52.505242 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:56:06 crc kubenswrapper[4712]: I0131 06:56:06.504654 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:56:06 crc kubenswrapper[4712]: E0131 06:56:06.505632 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:56:17 crc kubenswrapper[4712]: I0131 06:56:17.504340 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:56:17 crc kubenswrapper[4712]: E0131 06:56:17.505114 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:56:29 crc kubenswrapper[4712]: I0131 06:56:29.504353 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:56:29 crc kubenswrapper[4712]: E0131 06:56:29.506015 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:56:41 crc kubenswrapper[4712]: I0131 06:56:41.504235 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:56:41 crc kubenswrapper[4712]: E0131 06:56:41.506373 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:56:47 crc kubenswrapper[4712]: I0131 06:56:47.640473 4712 generic.go:334] "Generic (PLEG): container finished" podID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" containerID="f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251" exitCode=0 Jan 31 06:56:47 crc kubenswrapper[4712]: I0131 06:56:47.640553 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" event={"ID":"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7","Type":"ContainerDied","Data":"f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251"} Jan 31 06:56:47 crc kubenswrapper[4712]: I0131 06:56:47.642070 4712 scope.go:117] "RemoveContainer" containerID="f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251" Jan 31 06:56:48 crc kubenswrapper[4712]: I0131 06:56:48.239352 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-f5lzb_must-gather-4kqrc_16ecbcaf-1c2a-4461-adc6-69ee1316c9d7/gather/0.log" Jan 31 06:56:52 crc kubenswrapper[4712]: I0131 06:56:52.505944 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:56:52 crc kubenswrapper[4712]: E0131 06:56:52.507705 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:56:55 crc kubenswrapper[4712]: I0131 06:56:55.840525 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-f5lzb/must-gather-4kqrc"] Jan 31 06:56:55 crc kubenswrapper[4712]: I0131 06:56:55.841607 4712 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" podUID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" containerName="copy" containerID="cri-o://821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd" gracePeriod=2 Jan 31 06:56:55 crc kubenswrapper[4712]: I0131 06:56:55.853713 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-f5lzb/must-gather-4kqrc"] Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.369950 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-f5lzb_must-gather-4kqrc_16ecbcaf-1c2a-4461-adc6-69ee1316c9d7/copy/0.log" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.370945 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.523358 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-must-gather-output\") pod \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\" (UID: \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\") " Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.523551 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5968\" (UniqueName: \"kubernetes.io/projected/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-kube-api-access-r5968\") pod \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\" (UID: \"16ecbcaf-1c2a-4461-adc6-69ee1316c9d7\") " Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.529428 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-kube-api-access-r5968" (OuterVolumeSpecName: "kube-api-access-r5968") pod "16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" (UID: "16ecbcaf-1c2a-4461-adc6-69ee1316c9d7"). InnerVolumeSpecName "kube-api-access-r5968". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.626201 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5968\" (UniqueName: \"kubernetes.io/projected/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-kube-api-access-r5968\") on node \"crc\" DevicePath \"\"" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.693843 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" (UID: "16ecbcaf-1c2a-4461-adc6-69ee1316c9d7"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.728035 4712 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.778273 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-f5lzb_must-gather-4kqrc_16ecbcaf-1c2a-4461-adc6-69ee1316c9d7/copy/0.log" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.779287 4712 generic.go:334] "Generic (PLEG): container finished" podID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" containerID="821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd" exitCode=143 Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.779363 4712 scope.go:117] "RemoveContainer" containerID="821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.779606 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f5lzb/must-gather-4kqrc" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.830484 4712 scope.go:117] "RemoveContainer" containerID="f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.973406 4712 scope.go:117] "RemoveContainer" containerID="821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd" Jan 31 06:56:56 crc kubenswrapper[4712]: E0131 06:56:56.975285 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd\": container with ID starting with 821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd not found: ID does not exist" containerID="821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.975317 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd"} err="failed to get container status \"821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd\": rpc error: code = NotFound desc = could not find container \"821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd\": container with ID starting with 821480dcead749633581093d0147e8ec68bbb1063007c0e331338c0a5ec61bbd not found: ID does not exist" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.975340 4712 scope.go:117] "RemoveContainer" containerID="f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251" Jan 31 06:56:56 crc kubenswrapper[4712]: E0131 06:56:56.975978 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251\": container with ID starting with f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251 not found: ID does not exist" containerID="f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251" Jan 31 06:56:56 crc kubenswrapper[4712]: I0131 06:56:56.976010 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251"} err="failed to get container status 
\"f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251\": rpc error: code = NotFound desc = could not find container \"f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251\": container with ID starting with f8a7736c197502cd289ec9d90905321cb8c3c4e6af642fee1702519289987251 not found: ID does not exist" Jan 31 06:56:58 crc kubenswrapper[4712]: I0131 06:56:58.516037 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" path="/var/lib/kubelet/pods/16ecbcaf-1c2a-4461-adc6-69ee1316c9d7/volumes" Jan 31 06:57:06 crc kubenswrapper[4712]: I0131 06:57:06.504744 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:57:06 crc kubenswrapper[4712]: E0131 06:57:06.506310 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:57:20 crc kubenswrapper[4712]: I0131 06:57:20.504648 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:57:20 crc kubenswrapper[4712]: E0131 06:57:20.505411 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.517666 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2cpgg"] Jan 31 06:57:24 crc kubenswrapper[4712]: E0131 06:57:24.519998 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" containerName="copy" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.520155 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" containerName="copy" Jan 31 06:57:24 crc kubenswrapper[4712]: E0131 06:57:24.520291 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" containerName="gather" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.520379 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" containerName="gather" Jan 31 06:57:24 crc kubenswrapper[4712]: E0131 06:57:24.520467 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9184ec7a-9608-4c64-b30e-04cc501cf7ba" containerName="container-00" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.520553 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="9184ec7a-9608-4c64-b30e-04cc501cf7ba" containerName="container-00" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.520889 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" containerName="gather" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.521006 4712 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="16ecbcaf-1c2a-4461-adc6-69ee1316c9d7" containerName="copy" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.521101 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="9184ec7a-9608-4c64-b30e-04cc501cf7ba" containerName="container-00" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.523004 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.531333 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2cpgg"] Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.606807 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr6rv\" (UniqueName: \"kubernetes.io/projected/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-kube-api-access-fr6rv\") pod \"certified-operators-2cpgg\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.607237 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-catalog-content\") pod \"certified-operators-2cpgg\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.607623 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-utilities\") pod \"certified-operators-2cpgg\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.709541 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-utilities\") pod \"certified-operators-2cpgg\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.709611 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr6rv\" (UniqueName: \"kubernetes.io/projected/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-kube-api-access-fr6rv\") pod \"certified-operators-2cpgg\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.709649 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-catalog-content\") pod \"certified-operators-2cpgg\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.710131 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-utilities\") pod \"certified-operators-2cpgg\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.710467 4712 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-catalog-content\") pod \"certified-operators-2cpgg\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.729635 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr6rv\" (UniqueName: \"kubernetes.io/projected/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-kube-api-access-fr6rv\") pod \"certified-operators-2cpgg\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:24 crc kubenswrapper[4712]: I0131 06:57:24.857825 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:25 crc kubenswrapper[4712]: I0131 06:57:25.411801 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2cpgg"] Jan 31 06:57:26 crc kubenswrapper[4712]: I0131 06:57:26.066218 4712 generic.go:334] "Generic (PLEG): container finished" podID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerID="1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2" exitCode=0 Jan 31 06:57:26 crc kubenswrapper[4712]: I0131 06:57:26.066361 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2cpgg" event={"ID":"60cbf52d-98b5-4a2b-9e23-14bde3d1485c","Type":"ContainerDied","Data":"1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2"} Jan 31 06:57:26 crc kubenswrapper[4712]: I0131 06:57:26.066523 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2cpgg" event={"ID":"60cbf52d-98b5-4a2b-9e23-14bde3d1485c","Type":"ContainerStarted","Data":"a98116c157577b65bc8746ecde13424cde568643d9dfe22ea7c3382f6c2e2337"} Jan 31 06:57:26 crc kubenswrapper[4712]: I0131 06:57:26.068798 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 06:57:28 crc kubenswrapper[4712]: I0131 06:57:28.087458 4712 generic.go:334] "Generic (PLEG): container finished" podID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerID="18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca" exitCode=0 Jan 31 06:57:28 crc kubenswrapper[4712]: I0131 06:57:28.087633 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2cpgg" event={"ID":"60cbf52d-98b5-4a2b-9e23-14bde3d1485c","Type":"ContainerDied","Data":"18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca"} Jan 31 06:57:29 crc kubenswrapper[4712]: I0131 06:57:29.099492 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2cpgg" event={"ID":"60cbf52d-98b5-4a2b-9e23-14bde3d1485c","Type":"ContainerStarted","Data":"67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3"} Jan 31 06:57:29 crc kubenswrapper[4712]: I0131 06:57:29.126106 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2cpgg" podStartSLOduration=2.6010150579999998 podStartE2EDuration="5.126083626s" podCreationTimestamp="2026-01-31 06:57:24 +0000 UTC" firstStartedPulling="2026-01-31 06:57:26.068513174 +0000 UTC m=+4712.162395015" lastFinishedPulling="2026-01-31 06:57:28.593581742 +0000 UTC m=+4714.687463583" observedRunningTime="2026-01-31 06:57:29.116485869 
+0000 UTC m=+4715.210367740" watchObservedRunningTime="2026-01-31 06:57:29.126083626 +0000 UTC m=+4715.219965467" Jan 31 06:57:34 crc kubenswrapper[4712]: I0131 06:57:34.511166 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:57:34 crc kubenswrapper[4712]: E0131 06:57:34.511889 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:57:34 crc kubenswrapper[4712]: I0131 06:57:34.858435 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:34 crc kubenswrapper[4712]: I0131 06:57:34.858493 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:34 crc kubenswrapper[4712]: I0131 06:57:34.903594 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:35 crc kubenswrapper[4712]: I0131 06:57:35.204329 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:35 crc kubenswrapper[4712]: I0131 06:57:35.273118 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2cpgg"] Jan 31 06:57:37 crc kubenswrapper[4712]: I0131 06:57:37.175649 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2cpgg" podUID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerName="registry-server" containerID="cri-o://67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3" gracePeriod=2 Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.132788 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.184839 4712 generic.go:334] "Generic (PLEG): container finished" podID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerID="67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3" exitCode=0 Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.184887 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2cpgg" event={"ID":"60cbf52d-98b5-4a2b-9e23-14bde3d1485c","Type":"ContainerDied","Data":"67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3"} Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.184920 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2cpgg" event={"ID":"60cbf52d-98b5-4a2b-9e23-14bde3d1485c","Type":"ContainerDied","Data":"a98116c157577b65bc8746ecde13424cde568643d9dfe22ea7c3382f6c2e2337"} Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.184940 4712 scope.go:117] "RemoveContainer" containerID="67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.184893 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2cpgg" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.201294 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-utilities\") pod \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.201420 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-catalog-content\") pod \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.201503 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr6rv\" (UniqueName: \"kubernetes.io/projected/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-kube-api-access-fr6rv\") pod \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\" (UID: \"60cbf52d-98b5-4a2b-9e23-14bde3d1485c\") " Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.202535 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-utilities" (OuterVolumeSpecName: "utilities") pod "60cbf52d-98b5-4a2b-9e23-14bde3d1485c" (UID: "60cbf52d-98b5-4a2b-9e23-14bde3d1485c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.204514 4712 scope.go:117] "RemoveContainer" containerID="18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.216365 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-kube-api-access-fr6rv" (OuterVolumeSpecName: "kube-api-access-fr6rv") pod "60cbf52d-98b5-4a2b-9e23-14bde3d1485c" (UID: "60cbf52d-98b5-4a2b-9e23-14bde3d1485c"). InnerVolumeSpecName "kube-api-access-fr6rv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.260011 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60cbf52d-98b5-4a2b-9e23-14bde3d1485c" (UID: "60cbf52d-98b5-4a2b-9e23-14bde3d1485c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.268571 4712 scope.go:117] "RemoveContainer" containerID="1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.304205 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.304246 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.304259 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr6rv\" (UniqueName: \"kubernetes.io/projected/60cbf52d-98b5-4a2b-9e23-14bde3d1485c-kube-api-access-fr6rv\") on node \"crc\" DevicePath \"\"" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.316051 4712 scope.go:117] "RemoveContainer" containerID="67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3" Jan 31 06:57:38 crc kubenswrapper[4712]: E0131 06:57:38.316806 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3\": container with ID starting with 67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3 not found: ID does not exist" containerID="67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.316840 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3"} err="failed to get container status \"67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3\": rpc error: code = NotFound desc = could not find container \"67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3\": container with ID starting with 67b039a10e0e8b61f5bcaa644ceb73cf815a0c8392c87a0e17755f406aacb6d3 not found: ID does not exist" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.316869 4712 scope.go:117] "RemoveContainer" containerID="18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca" Jan 31 06:57:38 crc kubenswrapper[4712]: E0131 06:57:38.317424 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca\": container with ID starting with 18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca not found: ID does not exist" containerID="18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.317474 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca"} err="failed to get container status \"18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca\": rpc error: code = NotFound desc = could not find container \"18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca\": container with ID starting with 18088d5302dc6998ef9cb95a1783c0581547c65b4a379f02a0d5a170aa1915ca not found: ID does not exist" Jan 31 06:57:38 crc 
kubenswrapper[4712]: I0131 06:57:38.317509 4712 scope.go:117] "RemoveContainer" containerID="1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2" Jan 31 06:57:38 crc kubenswrapper[4712]: E0131 06:57:38.317903 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2\": container with ID starting with 1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2 not found: ID does not exist" containerID="1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.317948 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2"} err="failed to get container status \"1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2\": rpc error: code = NotFound desc = could not find container \"1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2\": container with ID starting with 1eee923e887473e23f651bf1a0baa0cc957e553ed59f51358b91e2bdf7cca6e2 not found: ID does not exist" Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.525362 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2cpgg"] Jan 31 06:57:38 crc kubenswrapper[4712]: I0131 06:57:38.534128 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2cpgg"] Jan 31 06:57:40 crc kubenswrapper[4712]: I0131 06:57:40.517974 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" path="/var/lib/kubelet/pods/60cbf52d-98b5-4a2b-9e23-14bde3d1485c/volumes" Jan 31 06:57:45 crc kubenswrapper[4712]: I0131 06:57:45.505157 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:57:45 crc kubenswrapper[4712]: E0131 06:57:45.506098 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:58:00 crc kubenswrapper[4712]: I0131 06:58:00.504733 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:58:00 crc kubenswrapper[4712]: E0131 06:58:00.505506 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:58:14 crc kubenswrapper[4712]: I0131 06:58:14.511068 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:58:14 crc kubenswrapper[4712]: E0131 06:58:14.511830 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:58:27 crc kubenswrapper[4712]: I0131 06:58:27.504343 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:58:27 crc kubenswrapper[4712]: E0131 06:58:27.505066 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:58:40 crc kubenswrapper[4712]: I0131 06:58:40.504201 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:58:40 crc kubenswrapper[4712]: E0131 06:58:40.504939 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 06:58:52 crc kubenswrapper[4712]: I0131 06:58:52.504727 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 06:58:53 crc kubenswrapper[4712]: I0131 06:58:53.896092 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"976e9973eae0d31adf73d583b4df4b5fb1b138f46b2f6dc612fbba0ffc5a3e25"} Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.151793 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2"] Jan 31 07:00:00 crc kubenswrapper[4712]: E0131 07:00:00.152649 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerName="extract-utilities" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.152664 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerName="extract-utilities" Jan 31 07:00:00 crc kubenswrapper[4712]: E0131 07:00:00.152697 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerName="registry-server" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.152705 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerName="registry-server" Jan 31 07:00:00 crc kubenswrapper[4712]: E0131 07:00:00.152722 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerName="extract-content" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.152729 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerName="extract-content" Jan 31 07:00:00 crc 
kubenswrapper[4712]: I0131 07:00:00.152953 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="60cbf52d-98b5-4a2b-9e23-14bde3d1485c" containerName="registry-server" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.153713 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.155851 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.156240 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.161383 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2"] Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.340442 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2602f04f-5d01-429b-bc27-746695dc2970-secret-volume\") pod \"collect-profiles-29497380-q2ns2\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.340736 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcn45\" (UniqueName: \"kubernetes.io/projected/2602f04f-5d01-429b-bc27-746695dc2970-kube-api-access-lcn45\") pod \"collect-profiles-29497380-q2ns2\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.340797 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2602f04f-5d01-429b-bc27-746695dc2970-config-volume\") pod \"collect-profiles-29497380-q2ns2\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.443555 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2602f04f-5d01-429b-bc27-746695dc2970-secret-volume\") pod \"collect-profiles-29497380-q2ns2\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.443676 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcn45\" (UniqueName: \"kubernetes.io/projected/2602f04f-5d01-429b-bc27-746695dc2970-kube-api-access-lcn45\") pod \"collect-profiles-29497380-q2ns2\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.444127 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2602f04f-5d01-429b-bc27-746695dc2970-config-volume\") pod \"collect-profiles-29497380-q2ns2\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.445330 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2602f04f-5d01-429b-bc27-746695dc2970-config-volume\") pod \"collect-profiles-29497380-q2ns2\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.451222 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2602f04f-5d01-429b-bc27-746695dc2970-secret-volume\") pod \"collect-profiles-29497380-q2ns2\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.467091 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcn45\" (UniqueName: \"kubernetes.io/projected/2602f04f-5d01-429b-bc27-746695dc2970-kube-api-access-lcn45\") pod \"collect-profiles-29497380-q2ns2\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.478767 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:00 crc kubenswrapper[4712]: I0131 07:00:00.939134 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2"] Jan 31 07:00:01 crc kubenswrapper[4712]: I0131 07:00:01.596637 4712 generic.go:334] "Generic (PLEG): container finished" podID="2602f04f-5d01-429b-bc27-746695dc2970" containerID="d7c4edc0c1eb34a62ed0e36ae158f799afadf840192189417d593f9ef131d72f" exitCode=0 Jan 31 07:00:01 crc kubenswrapper[4712]: I0131 07:00:01.597101 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" event={"ID":"2602f04f-5d01-429b-bc27-746695dc2970","Type":"ContainerDied","Data":"d7c4edc0c1eb34a62ed0e36ae158f799afadf840192189417d593f9ef131d72f"} Jan 31 07:00:01 crc kubenswrapper[4712]: I0131 07:00:01.597154 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" event={"ID":"2602f04f-5d01-429b-bc27-746695dc2970","Type":"ContainerStarted","Data":"e1e8a5d00a248287e4fdeedf7b73d4aee16208a917996a2d0aa0c739dee38fdd"} Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:02.994855 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.119792 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2602f04f-5d01-429b-bc27-746695dc2970-secret-volume\") pod \"2602f04f-5d01-429b-bc27-746695dc2970\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.119887 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2602f04f-5d01-429b-bc27-746695dc2970-config-volume\") pod \"2602f04f-5d01-429b-bc27-746695dc2970\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.120448 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2602f04f-5d01-429b-bc27-746695dc2970-config-volume" (OuterVolumeSpecName: "config-volume") pod "2602f04f-5d01-429b-bc27-746695dc2970" (UID: "2602f04f-5d01-429b-bc27-746695dc2970"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.119917 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcn45\" (UniqueName: \"kubernetes.io/projected/2602f04f-5d01-429b-bc27-746695dc2970-kube-api-access-lcn45\") pod \"2602f04f-5d01-429b-bc27-746695dc2970\" (UID: \"2602f04f-5d01-429b-bc27-746695dc2970\") " Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.120850 4712 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2602f04f-5d01-429b-bc27-746695dc2970-config-volume\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.129762 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2602f04f-5d01-429b-bc27-746695dc2970-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2602f04f-5d01-429b-bc27-746695dc2970" (UID: "2602f04f-5d01-429b-bc27-746695dc2970"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.132462 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2602f04f-5d01-429b-bc27-746695dc2970-kube-api-access-lcn45" (OuterVolumeSpecName: "kube-api-access-lcn45") pod "2602f04f-5d01-429b-bc27-746695dc2970" (UID: "2602f04f-5d01-429b-bc27-746695dc2970"). InnerVolumeSpecName "kube-api-access-lcn45". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.222117 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcn45\" (UniqueName: \"kubernetes.io/projected/2602f04f-5d01-429b-bc27-746695dc2970-kube-api-access-lcn45\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.222147 4712 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2602f04f-5d01-429b-bc27-746695dc2970-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.615147 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" event={"ID":"2602f04f-5d01-429b-bc27-746695dc2970","Type":"ContainerDied","Data":"e1e8a5d00a248287e4fdeedf7b73d4aee16208a917996a2d0aa0c739dee38fdd"} Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.615218 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1e8a5d00a248287e4fdeedf7b73d4aee16208a917996a2d0aa0c739dee38fdd" Jan 31 07:00:03 crc kubenswrapper[4712]: I0131 07:00:03.615291 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29497380-q2ns2" Jan 31 07:00:04 crc kubenswrapper[4712]: I0131 07:00:04.086214 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x"] Jan 31 07:00:04 crc kubenswrapper[4712]: I0131 07:00:04.096425 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29497335-f788x"] Jan 31 07:00:04 crc kubenswrapper[4712]: I0131 07:00:04.516894 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b47ae7f-713f-4f37-af6c-112e264c6ef7" path="/var/lib/kubelet/pods/0b47ae7f-713f-4f37-af6c-112e264c6ef7/volumes" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.023487 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5mfnn/must-gather-s58sh"] Jan 31 07:00:06 crc kubenswrapper[4712]: E0131 07:00:06.024414 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2602f04f-5d01-429b-bc27-746695dc2970" containerName="collect-profiles" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.024434 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="2602f04f-5d01-429b-bc27-746695dc2970" containerName="collect-profiles" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.024641 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="2602f04f-5d01-429b-bc27-746695dc2970" containerName="collect-profiles" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.025836 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5mfnn/must-gather-s58sh" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.029072 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-5mfnn"/"openshift-service-ca.crt" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.033420 4712 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-5mfnn"/"kube-root-ca.crt" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.056473 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-5mfnn/must-gather-s58sh"] Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.090532 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-must-gather-output\") pod \"must-gather-s58sh\" (UID: \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\") " pod="openshift-must-gather-5mfnn/must-gather-s58sh" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.090659 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjbf4\" (UniqueName: \"kubernetes.io/projected/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-kube-api-access-wjbf4\") pod \"must-gather-s58sh\" (UID: \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\") " pod="openshift-must-gather-5mfnn/must-gather-s58sh" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.194421 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-must-gather-output\") pod \"must-gather-s58sh\" (UID: \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\") " pod="openshift-must-gather-5mfnn/must-gather-s58sh" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.194540 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjbf4\" (UniqueName: \"kubernetes.io/projected/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-kube-api-access-wjbf4\") pod \"must-gather-s58sh\" (UID: \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\") " pod="openshift-must-gather-5mfnn/must-gather-s58sh" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.194998 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-must-gather-output\") pod \"must-gather-s58sh\" (UID: \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\") " pod="openshift-must-gather-5mfnn/must-gather-s58sh" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.214844 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjbf4\" (UniqueName: \"kubernetes.io/projected/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-kube-api-access-wjbf4\") pod \"must-gather-s58sh\" (UID: \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\") " pod="openshift-must-gather-5mfnn/must-gather-s58sh" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.346527 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5mfnn/must-gather-s58sh" Jan 31 07:00:06 crc kubenswrapper[4712]: I0131 07:00:06.852464 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-5mfnn/must-gather-s58sh"] Jan 31 07:00:07 crc kubenswrapper[4712]: I0131 07:00:07.663448 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5mfnn/must-gather-s58sh" event={"ID":"faddc91f-ad86-4122-9e6c-72fd3d9abf3c","Type":"ContainerStarted","Data":"b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d"} Jan 31 07:00:07 crc kubenswrapper[4712]: I0131 07:00:07.663802 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5mfnn/must-gather-s58sh" event={"ID":"faddc91f-ad86-4122-9e6c-72fd3d9abf3c","Type":"ContainerStarted","Data":"a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0"} Jan 31 07:00:07 crc kubenswrapper[4712]: I0131 07:00:07.663815 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5mfnn/must-gather-s58sh" event={"ID":"faddc91f-ad86-4122-9e6c-72fd3d9abf3c","Type":"ContainerStarted","Data":"40a9b223cfde82739ad5772be5a576a05760e4697eddebedc19174e35ecd265e"} Jan 31 07:00:07 crc kubenswrapper[4712]: I0131 07:00:07.689829 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-5mfnn/must-gather-s58sh" podStartSLOduration=2.689812778 podStartE2EDuration="2.689812778s" podCreationTimestamp="2026-01-31 07:00:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 07:00:07.682797456 +0000 UTC m=+4873.776679287" watchObservedRunningTime="2026-01-31 07:00:07.689812778 +0000 UTC m=+4873.783694619" Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.451435 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5mfnn/crc-debug-djn89"] Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.452998 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.454656 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-5mfnn"/"default-dockercfg-vx9cw" Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.533148 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5m7f\" (UniqueName: \"kubernetes.io/projected/490cbac4-e5f4-4c79-84a4-ee52c40cc596-kube-api-access-q5m7f\") pod \"crc-debug-djn89\" (UID: \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\") " pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.533252 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/490cbac4-e5f4-4c79-84a4-ee52c40cc596-host\") pod \"crc-debug-djn89\" (UID: \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\") " pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.635256 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5m7f\" (UniqueName: \"kubernetes.io/projected/490cbac4-e5f4-4c79-84a4-ee52c40cc596-kube-api-access-q5m7f\") pod \"crc-debug-djn89\" (UID: \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\") " pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.635364 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/490cbac4-e5f4-4c79-84a4-ee52c40cc596-host\") pod \"crc-debug-djn89\" (UID: \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\") " pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.635590 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/490cbac4-e5f4-4c79-84a4-ee52c40cc596-host\") pod \"crc-debug-djn89\" (UID: \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\") " pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.657141 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5m7f\" (UniqueName: \"kubernetes.io/projected/490cbac4-e5f4-4c79-84a4-ee52c40cc596-kube-api-access-q5m7f\") pod \"crc-debug-djn89\" (UID: \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\") " pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:11 crc kubenswrapper[4712]: I0131 07:00:11.777615 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:12 crc kubenswrapper[4712]: I0131 07:00:12.708860 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5mfnn/crc-debug-djn89" event={"ID":"490cbac4-e5f4-4c79-84a4-ee52c40cc596","Type":"ContainerStarted","Data":"684627c9188afc2926866a66ddf29d4abad8fbd46e99062b2670ada1a215b32a"} Jan 31 07:00:12 crc kubenswrapper[4712]: I0131 07:00:12.710370 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5mfnn/crc-debug-djn89" event={"ID":"490cbac4-e5f4-4c79-84a4-ee52c40cc596","Type":"ContainerStarted","Data":"a4e155539ab31a7bb43e1245288015d3c2b92cda548cf0944da986f36aed04e0"} Jan 31 07:00:12 crc kubenswrapper[4712]: I0131 07:00:12.738305 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-5mfnn/crc-debug-djn89" podStartSLOduration=1.738277721 podStartE2EDuration="1.738277721s" podCreationTimestamp="2026-01-31 07:00:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 07:00:12.728023068 +0000 UTC m=+4878.821904909" watchObservedRunningTime="2026-01-31 07:00:12.738277721 +0000 UTC m=+4878.832159562" Jan 31 07:00:22 crc kubenswrapper[4712]: I0131 07:00:22.912351 4712 scope.go:117] "RemoveContainer" containerID="cc8f947125e7bc98bdfa3f9fa7c551c6c67e007d9ff90c42b71c2e356ade17d7" Jan 31 07:00:23 crc kubenswrapper[4712]: I0131 07:00:23.832329 4712 generic.go:334] "Generic (PLEG): container finished" podID="490cbac4-e5f4-4c79-84a4-ee52c40cc596" containerID="684627c9188afc2926866a66ddf29d4abad8fbd46e99062b2670ada1a215b32a" exitCode=0 Jan 31 07:00:23 crc kubenswrapper[4712]: I0131 07:00:23.832412 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5mfnn/crc-debug-djn89" event={"ID":"490cbac4-e5f4-4c79-84a4-ee52c40cc596","Type":"ContainerDied","Data":"684627c9188afc2926866a66ddf29d4abad8fbd46e99062b2670ada1a215b32a"} Jan 31 07:00:24 crc kubenswrapper[4712]: I0131 07:00:24.956693 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:24 crc kubenswrapper[4712]: I0131 07:00:24.999925 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5mfnn/crc-debug-djn89"] Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.002625 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/490cbac4-e5f4-4c79-84a4-ee52c40cc596-host\") pod \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\" (UID: \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\") " Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.002836 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5m7f\" (UniqueName: \"kubernetes.io/projected/490cbac4-e5f4-4c79-84a4-ee52c40cc596-kube-api-access-q5m7f\") pod \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\" (UID: \"490cbac4-e5f4-4c79-84a4-ee52c40cc596\") " Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.002837 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/490cbac4-e5f4-4c79-84a4-ee52c40cc596-host" (OuterVolumeSpecName: "host") pod "490cbac4-e5f4-4c79-84a4-ee52c40cc596" (UID: "490cbac4-e5f4-4c79-84a4-ee52c40cc596"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.003522 4712 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/490cbac4-e5f4-4c79-84a4-ee52c40cc596-host\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.010163 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5mfnn/crc-debug-djn89"] Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.013308 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/490cbac4-e5f4-4c79-84a4-ee52c40cc596-kube-api-access-q5m7f" (OuterVolumeSpecName: "kube-api-access-q5m7f") pod "490cbac4-e5f4-4c79-84a4-ee52c40cc596" (UID: "490cbac4-e5f4-4c79-84a4-ee52c40cc596"). InnerVolumeSpecName "kube-api-access-q5m7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.105212 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5m7f\" (UniqueName: \"kubernetes.io/projected/490cbac4-e5f4-4c79-84a4-ee52c40cc596-kube-api-access-q5m7f\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.628449 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-52h2b"] Jan 31 07:00:25 crc kubenswrapper[4712]: E0131 07:00:25.630252 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="490cbac4-e5f4-4c79-84a4-ee52c40cc596" containerName="container-00" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.630366 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="490cbac4-e5f4-4c79-84a4-ee52c40cc596" containerName="container-00" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.630779 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="490cbac4-e5f4-4c79-84a4-ee52c40cc596" containerName="container-00" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.632682 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.645928 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-52h2b"] Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.820007 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr98d\" (UniqueName: \"kubernetes.io/projected/0dc176e4-945e-414e-8c82-f654086dd217-kube-api-access-kr98d\") pod \"community-operators-52h2b\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.820143 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-utilities\") pod \"community-operators-52h2b\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.820194 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-catalog-content\") pod \"community-operators-52h2b\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.852375 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4e155539ab31a7bb43e1245288015d3c2b92cda548cf0944da986f36aed04e0" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.852522 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5mfnn/crc-debug-djn89" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.922373 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-catalog-content\") pod \"community-operators-52h2b\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.922628 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr98d\" (UniqueName: \"kubernetes.io/projected/0dc176e4-945e-414e-8c82-f654086dd217-kube-api-access-kr98d\") pod \"community-operators-52h2b\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.922750 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-utilities\") pod \"community-operators-52h2b\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.922874 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-catalog-content\") pod \"community-operators-52h2b\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.923499 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-utilities\") pod \"community-operators-52h2b\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.944331 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr98d\" (UniqueName: \"kubernetes.io/projected/0dc176e4-945e-414e-8c82-f654086dd217-kube-api-access-kr98d\") pod \"community-operators-52h2b\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:25 crc kubenswrapper[4712]: I0131 07:00:25.993832 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.409236 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5mfnn/crc-debug-sjtqv"] Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.410648 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.413151 4712 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-5mfnn"/"default-dockercfg-vx9cw" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.444391 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-str8f\" (UniqueName: \"kubernetes.io/projected/458371ca-606b-4650-bb81-fb42166e4de6-kube-api-access-str8f\") pod \"crc-debug-sjtqv\" (UID: \"458371ca-606b-4650-bb81-fb42166e4de6\") " pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.444577 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/458371ca-606b-4650-bb81-fb42166e4de6-host\") pod \"crc-debug-sjtqv\" (UID: \"458371ca-606b-4650-bb81-fb42166e4de6\") " pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.517609 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="490cbac4-e5f4-4c79-84a4-ee52c40cc596" path="/var/lib/kubelet/pods/490cbac4-e5f4-4c79-84a4-ee52c40cc596/volumes" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.546475 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/458371ca-606b-4650-bb81-fb42166e4de6-host\") pod \"crc-debug-sjtqv\" (UID: \"458371ca-606b-4650-bb81-fb42166e4de6\") " pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.546642 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-str8f\" (UniqueName: \"kubernetes.io/projected/458371ca-606b-4650-bb81-fb42166e4de6-kube-api-access-str8f\") pod \"crc-debug-sjtqv\" (UID: \"458371ca-606b-4650-bb81-fb42166e4de6\") " pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.546949 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/458371ca-606b-4650-bb81-fb42166e4de6-host\") pod \"crc-debug-sjtqv\" (UID: \"458371ca-606b-4650-bb81-fb42166e4de6\") " pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.567049 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-str8f\" (UniqueName: \"kubernetes.io/projected/458371ca-606b-4650-bb81-fb42166e4de6-kube-api-access-str8f\") pod \"crc-debug-sjtqv\" (UID: \"458371ca-606b-4650-bb81-fb42166e4de6\") " pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.572675 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-52h2b"] Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.730395 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:26 crc kubenswrapper[4712]: W0131 07:00:26.756076 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod458371ca_606b_4650_bb81_fb42166e4de6.slice/crio-9b28a55e998453f457035631c226b76f734ef7a85b10bf76b010f83f63f8a56a WatchSource:0}: Error finding container 9b28a55e998453f457035631c226b76f734ef7a85b10bf76b010f83f63f8a56a: Status 404 returned error can't find the container with id 9b28a55e998453f457035631c226b76f734ef7a85b10bf76b010f83f63f8a56a Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.865498 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-52h2b" event={"ID":"0dc176e4-945e-414e-8c82-f654086dd217","Type":"ContainerStarted","Data":"db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595"} Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.865551 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-52h2b" event={"ID":"0dc176e4-945e-414e-8c82-f654086dd217","Type":"ContainerStarted","Data":"a56694f6056eea72a5995dd9e1ec2cd015966e226f9fb45ae4363f689384bfbd"} Jan 31 07:00:26 crc kubenswrapper[4712]: I0131 07:00:26.868302 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" event={"ID":"458371ca-606b-4650-bb81-fb42166e4de6","Type":"ContainerStarted","Data":"9b28a55e998453f457035631c226b76f734ef7a85b10bf76b010f83f63f8a56a"} Jan 31 07:00:27 crc kubenswrapper[4712]: I0131 07:00:27.877592 4712 generic.go:334] "Generic (PLEG): container finished" podID="0dc176e4-945e-414e-8c82-f654086dd217" containerID="db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595" exitCode=0 Jan 31 07:00:27 crc kubenswrapper[4712]: I0131 07:00:27.877694 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-52h2b" event={"ID":"0dc176e4-945e-414e-8c82-f654086dd217","Type":"ContainerDied","Data":"db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595"} Jan 31 07:00:27 crc kubenswrapper[4712]: I0131 07:00:27.880443 4712 generic.go:334] "Generic (PLEG): container finished" podID="458371ca-606b-4650-bb81-fb42166e4de6" containerID="70970d681e130e4228b2d95bcceac5b42a313d49d514ac990e06ff2e1aa8390e" exitCode=1 Jan 31 07:00:27 crc kubenswrapper[4712]: I0131 07:00:27.880486 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" event={"ID":"458371ca-606b-4650-bb81-fb42166e4de6","Type":"ContainerDied","Data":"70970d681e130e4228b2d95bcceac5b42a313d49d514ac990e06ff2e1aa8390e"} Jan 31 07:00:27 crc kubenswrapper[4712]: I0131 07:00:27.940954 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5mfnn/crc-debug-sjtqv"] Jan 31 07:00:27 crc kubenswrapper[4712]: I0131 07:00:27.951101 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5mfnn/crc-debug-sjtqv"] Jan 31 07:00:28 crc kubenswrapper[4712]: I0131 07:00:28.890166 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-52h2b" event={"ID":"0dc176e4-945e-414e-8c82-f654086dd217","Type":"ContainerStarted","Data":"d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56"} Jan 31 07:00:28 crc kubenswrapper[4712]: I0131 07:00:28.991400 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.094416 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/458371ca-606b-4650-bb81-fb42166e4de6-host\") pod \"458371ca-606b-4650-bb81-fb42166e4de6\" (UID: \"458371ca-606b-4650-bb81-fb42166e4de6\") " Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.094509 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-str8f\" (UniqueName: \"kubernetes.io/projected/458371ca-606b-4650-bb81-fb42166e4de6-kube-api-access-str8f\") pod \"458371ca-606b-4650-bb81-fb42166e4de6\" (UID: \"458371ca-606b-4650-bb81-fb42166e4de6\") " Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.094547 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/458371ca-606b-4650-bb81-fb42166e4de6-host" (OuterVolumeSpecName: "host") pod "458371ca-606b-4650-bb81-fb42166e4de6" (UID: "458371ca-606b-4650-bb81-fb42166e4de6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.094784 4712 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/458371ca-606b-4650-bb81-fb42166e4de6-host\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.100833 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/458371ca-606b-4650-bb81-fb42166e4de6-kube-api-access-str8f" (OuterVolumeSpecName: "kube-api-access-str8f") pod "458371ca-606b-4650-bb81-fb42166e4de6" (UID: "458371ca-606b-4650-bb81-fb42166e4de6"). InnerVolumeSpecName "kube-api-access-str8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.195586 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-str8f\" (UniqueName: \"kubernetes.io/projected/458371ca-606b-4650-bb81-fb42166e4de6-kube-api-access-str8f\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.901072 4712 generic.go:334] "Generic (PLEG): container finished" podID="0dc176e4-945e-414e-8c82-f654086dd217" containerID="d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56" exitCode=0 Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.903268 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-52h2b" event={"ID":"0dc176e4-945e-414e-8c82-f654086dd217","Type":"ContainerDied","Data":"d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56"} Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.904596 4712 scope.go:117] "RemoveContainer" containerID="70970d681e130e4228b2d95bcceac5b42a313d49d514ac990e06ff2e1aa8390e" Jan 31 07:00:29 crc kubenswrapper[4712]: I0131 07:00:29.904728 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5mfnn/crc-debug-sjtqv" Jan 31 07:00:30 crc kubenswrapper[4712]: I0131 07:00:30.513899 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="458371ca-606b-4650-bb81-fb42166e4de6" path="/var/lib/kubelet/pods/458371ca-606b-4650-bb81-fb42166e4de6/volumes" Jan 31 07:00:30 crc kubenswrapper[4712]: I0131 07:00:30.914980 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-52h2b" event={"ID":"0dc176e4-945e-414e-8c82-f654086dd217","Type":"ContainerStarted","Data":"fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37"} Jan 31 07:00:30 crc kubenswrapper[4712]: I0131 07:00:30.942720 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-52h2b" podStartSLOduration=3.522320878 podStartE2EDuration="5.942693077s" podCreationTimestamp="2026-01-31 07:00:25 +0000 UTC" firstStartedPulling="2026-01-31 07:00:27.880341668 +0000 UTC m=+4893.974223509" lastFinishedPulling="2026-01-31 07:00:30.300713867 +0000 UTC m=+4896.394595708" observedRunningTime="2026-01-31 07:00:30.932731612 +0000 UTC m=+4897.026613463" watchObservedRunningTime="2026-01-31 07:00:30.942693077 +0000 UTC m=+4897.036574918" Jan 31 07:00:35 crc kubenswrapper[4712]: I0131 07:00:35.994280 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:35 crc kubenswrapper[4712]: I0131 07:00:35.995965 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:36 crc kubenswrapper[4712]: I0131 07:00:36.045828 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:37 crc kubenswrapper[4712]: I0131 07:00:37.011939 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:37 crc kubenswrapper[4712]: I0131 07:00:37.060208 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-52h2b"] Jan 31 07:00:38 crc kubenswrapper[4712]: I0131 07:00:38.984335 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-52h2b" podUID="0dc176e4-945e-414e-8c82-f654086dd217" containerName="registry-server" containerID="cri-o://fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37" gracePeriod=2 Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.450039 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.520635 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-utilities\") pod \"0dc176e4-945e-414e-8c82-f654086dd217\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.520690 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr98d\" (UniqueName: \"kubernetes.io/projected/0dc176e4-945e-414e-8c82-f654086dd217-kube-api-access-kr98d\") pod \"0dc176e4-945e-414e-8c82-f654086dd217\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.520787 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-catalog-content\") pod \"0dc176e4-945e-414e-8c82-f654086dd217\" (UID: \"0dc176e4-945e-414e-8c82-f654086dd217\") " Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.521682 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-utilities" (OuterVolumeSpecName: "utilities") pod "0dc176e4-945e-414e-8c82-f654086dd217" (UID: "0dc176e4-945e-414e-8c82-f654086dd217"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.525707 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dc176e4-945e-414e-8c82-f654086dd217-kube-api-access-kr98d" (OuterVolumeSpecName: "kube-api-access-kr98d") pod "0dc176e4-945e-414e-8c82-f654086dd217" (UID: "0dc176e4-945e-414e-8c82-f654086dd217"). InnerVolumeSpecName "kube-api-access-kr98d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.612144 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0dc176e4-945e-414e-8c82-f654086dd217" (UID: "0dc176e4-945e-414e-8c82-f654086dd217"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.623510 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.623539 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr98d\" (UniqueName: \"kubernetes.io/projected/0dc176e4-945e-414e-8c82-f654086dd217-kube-api-access-kr98d\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.636268 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dc176e4-945e-414e-8c82-f654086dd217-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.997932 4712 generic.go:334] "Generic (PLEG): container finished" podID="0dc176e4-945e-414e-8c82-f654086dd217" containerID="fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37" exitCode=0 Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.997975 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-52h2b" event={"ID":"0dc176e4-945e-414e-8c82-f654086dd217","Type":"ContainerDied","Data":"fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37"} Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.998002 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-52h2b" event={"ID":"0dc176e4-945e-414e-8c82-f654086dd217","Type":"ContainerDied","Data":"a56694f6056eea72a5995dd9e1ec2cd015966e226f9fb45ae4363f689384bfbd"} Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.998021 4712 scope.go:117] "RemoveContainer" containerID="fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37" Jan 31 07:00:39 crc kubenswrapper[4712]: I0131 07:00:39.998222 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-52h2b" Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.024488 4712 scope.go:117] "RemoveContainer" containerID="d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56" Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.041130 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-52h2b"] Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.049581 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-52h2b"] Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.075607 4712 scope.go:117] "RemoveContainer" containerID="db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595" Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.093560 4712 scope.go:117] "RemoveContainer" containerID="fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37" Jan 31 07:00:40 crc kubenswrapper[4712]: E0131 07:00:40.094021 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37\": container with ID starting with fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37 not found: ID does not exist" containerID="fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37" Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.094065 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37"} err="failed to get container status \"fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37\": rpc error: code = NotFound desc = could not find container \"fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37\": container with ID starting with fbed4e47b58df9a2dfb622e7f8c7d2f0a13573e96b700245d903c57947890d37 not found: ID does not exist" Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.094094 4712 scope.go:117] "RemoveContainer" containerID="d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56" Jan 31 07:00:40 crc kubenswrapper[4712]: E0131 07:00:40.094624 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56\": container with ID starting with d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56 not found: ID does not exist" containerID="d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56" Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.094649 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56"} err="failed to get container status \"d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56\": rpc error: code = NotFound desc = could not find container \"d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56\": container with ID starting with d9920469b0bd9e8f2453309b7680f48688fd5d5c8fc41706f44174f25fe7bf56 not found: ID does not exist" Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.094661 4712 scope.go:117] "RemoveContainer" containerID="db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595" Jan 31 07:00:40 crc kubenswrapper[4712]: E0131 07:00:40.094908 4712 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595\": container with ID starting with db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595 not found: ID does not exist" containerID="db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595" Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.094937 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595"} err="failed to get container status \"db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595\": rpc error: code = NotFound desc = could not find container \"db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595\": container with ID starting with db4e16ac7699aba176c7133810ff130b9bd23cc8bd7f5b2917e2145b5c177595 not found: ID does not exist" Jan 31 07:00:40 crc kubenswrapper[4712]: I0131 07:00:40.514382 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dc176e4-945e-414e-8c82-f654086dd217" path="/var/lib/kubelet/pods/0dc176e4-945e-414e-8c82-f654086dd217/volumes" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.575365 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p85px"] Jan 31 07:00:46 crc kubenswrapper[4712]: E0131 07:00:46.576189 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc176e4-945e-414e-8c82-f654086dd217" containerName="registry-server" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.576202 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc176e4-945e-414e-8c82-f654086dd217" containerName="registry-server" Jan 31 07:00:46 crc kubenswrapper[4712]: E0131 07:00:46.576216 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="458371ca-606b-4650-bb81-fb42166e4de6" containerName="container-00" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.576222 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="458371ca-606b-4650-bb81-fb42166e4de6" containerName="container-00" Jan 31 07:00:46 crc kubenswrapper[4712]: E0131 07:00:46.576234 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc176e4-945e-414e-8c82-f654086dd217" containerName="extract-utilities" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.576240 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc176e4-945e-414e-8c82-f654086dd217" containerName="extract-utilities" Jan 31 07:00:46 crc kubenswrapper[4712]: E0131 07:00:46.576261 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc176e4-945e-414e-8c82-f654086dd217" containerName="extract-content" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.576266 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc176e4-945e-414e-8c82-f654086dd217" containerName="extract-content" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.576466 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="458371ca-606b-4650-bb81-fb42166e4de6" containerName="container-00" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.576477 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dc176e4-945e-414e-8c82-f654086dd217" containerName="registry-server" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.577770 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.588465 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p85px"] Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.768289 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-catalog-content\") pod \"redhat-marketplace-p85px\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.768406 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-utilities\") pod \"redhat-marketplace-p85px\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.768610 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25q2f\" (UniqueName: \"kubernetes.io/projected/ee6e111a-4814-4ff0-b817-e74814d588a1-kube-api-access-25q2f\") pod \"redhat-marketplace-p85px\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.870333 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-utilities\") pod \"redhat-marketplace-p85px\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.870428 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25q2f\" (UniqueName: \"kubernetes.io/projected/ee6e111a-4814-4ff0-b817-e74814d588a1-kube-api-access-25q2f\") pod \"redhat-marketplace-p85px\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.870537 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-catalog-content\") pod \"redhat-marketplace-p85px\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.871336 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-catalog-content\") pod \"redhat-marketplace-p85px\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.871673 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-utilities\") pod \"redhat-marketplace-p85px\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.897066 4712 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-25q2f\" (UniqueName: \"kubernetes.io/projected/ee6e111a-4814-4ff0-b817-e74814d588a1-kube-api-access-25q2f\") pod \"redhat-marketplace-p85px\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:46 crc kubenswrapper[4712]: I0131 07:00:46.897889 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:47 crc kubenswrapper[4712]: I0131 07:00:47.435115 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p85px"] Jan 31 07:00:48 crc kubenswrapper[4712]: I0131 07:00:48.083964 4712 generic.go:334] "Generic (PLEG): container finished" podID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerID="c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568" exitCode=0 Jan 31 07:00:48 crc kubenswrapper[4712]: I0131 07:00:48.084012 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p85px" event={"ID":"ee6e111a-4814-4ff0-b817-e74814d588a1","Type":"ContainerDied","Data":"c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568"} Jan 31 07:00:48 crc kubenswrapper[4712]: I0131 07:00:48.084303 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p85px" event={"ID":"ee6e111a-4814-4ff0-b817-e74814d588a1","Type":"ContainerStarted","Data":"ceab8a153b3fab166e1bbcff2d9ad214f8b6324179cdcd3c0601892dfad5b7f5"} Jan 31 07:00:49 crc kubenswrapper[4712]: I0131 07:00:49.095115 4712 generic.go:334] "Generic (PLEG): container finished" podID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerID="52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb" exitCode=0 Jan 31 07:00:49 crc kubenswrapper[4712]: I0131 07:00:49.095219 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p85px" event={"ID":"ee6e111a-4814-4ff0-b817-e74814d588a1","Type":"ContainerDied","Data":"52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb"} Jan 31 07:00:50 crc kubenswrapper[4712]: I0131 07:00:50.105888 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p85px" event={"ID":"ee6e111a-4814-4ff0-b817-e74814d588a1","Type":"ContainerStarted","Data":"7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88"} Jan 31 07:00:50 crc kubenswrapper[4712]: I0131 07:00:50.132222 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p85px" podStartSLOduration=2.487495965 podStartE2EDuration="4.132204881s" podCreationTimestamp="2026-01-31 07:00:46 +0000 UTC" firstStartedPulling="2026-01-31 07:00:48.086060388 +0000 UTC m=+4914.179942229" lastFinishedPulling="2026-01-31 07:00:49.730769304 +0000 UTC m=+4915.824651145" observedRunningTime="2026-01-31 07:00:50.121972098 +0000 UTC m=+4916.215853939" watchObservedRunningTime="2026-01-31 07:00:50.132204881 +0000 UTC m=+4916.226086722" Jan 31 07:00:56 crc kubenswrapper[4712]: I0131 07:00:56.898902 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:56 crc kubenswrapper[4712]: I0131 07:00:56.901410 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:56 crc kubenswrapper[4712]: I0131 07:00:56.948487 4712 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:57 crc kubenswrapper[4712]: I0131 07:00:57.220823 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:57 crc kubenswrapper[4712]: I0131 07:00:57.270738 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p85px"] Jan 31 07:00:59 crc kubenswrapper[4712]: I0131 07:00:59.190578 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p85px" podUID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerName="registry-server" containerID="cri-o://7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88" gracePeriod=2 Jan 31 07:00:59 crc kubenswrapper[4712]: I0131 07:00:59.670107 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:00:59 crc kubenswrapper[4712]: I0131 07:00:59.783762 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25q2f\" (UniqueName: \"kubernetes.io/projected/ee6e111a-4814-4ff0-b817-e74814d588a1-kube-api-access-25q2f\") pod \"ee6e111a-4814-4ff0-b817-e74814d588a1\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " Jan 31 07:00:59 crc kubenswrapper[4712]: I0131 07:00:59.783889 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-utilities\") pod \"ee6e111a-4814-4ff0-b817-e74814d588a1\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " Jan 31 07:00:59 crc kubenswrapper[4712]: I0131 07:00:59.784013 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-catalog-content\") pod \"ee6e111a-4814-4ff0-b817-e74814d588a1\" (UID: \"ee6e111a-4814-4ff0-b817-e74814d588a1\") " Jan 31 07:00:59 crc kubenswrapper[4712]: I0131 07:00:59.785002 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-utilities" (OuterVolumeSpecName: "utilities") pod "ee6e111a-4814-4ff0-b817-e74814d588a1" (UID: "ee6e111a-4814-4ff0-b817-e74814d588a1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 07:00:59 crc kubenswrapper[4712]: I0131 07:00:59.797437 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee6e111a-4814-4ff0-b817-e74814d588a1-kube-api-access-25q2f" (OuterVolumeSpecName: "kube-api-access-25q2f") pod "ee6e111a-4814-4ff0-b817-e74814d588a1" (UID: "ee6e111a-4814-4ff0-b817-e74814d588a1"). InnerVolumeSpecName "kube-api-access-25q2f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 07:00:59 crc kubenswrapper[4712]: I0131 07:00:59.886536 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 07:00:59 crc kubenswrapper[4712]: I0131 07:00:59.886591 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25q2f\" (UniqueName: \"kubernetes.io/projected/ee6e111a-4814-4ff0-b817-e74814d588a1-kube-api-access-25q2f\") on node \"crc\" DevicePath \"\"" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.161705 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29497381-b6tmf"] Jan 31 07:01:00 crc kubenswrapper[4712]: E0131 07:01:00.162108 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerName="extract-utilities" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.162123 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerName="extract-utilities" Jan 31 07:01:00 crc kubenswrapper[4712]: E0131 07:01:00.162160 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerName="registry-server" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.162185 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerName="registry-server" Jan 31 07:01:00 crc kubenswrapper[4712]: E0131 07:01:00.162202 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerName="extract-content" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.162213 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerName="extract-content" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.162400 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerName="registry-server" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.163059 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.176758 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29497381-b6tmf"] Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.203695 4712 generic.go:334] "Generic (PLEG): container finished" podID="ee6e111a-4814-4ff0-b817-e74814d588a1" containerID="7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88" exitCode=0 Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.203758 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p85px" event={"ID":"ee6e111a-4814-4ff0-b817-e74814d588a1","Type":"ContainerDied","Data":"7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88"} Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.203811 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p85px" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.203840 4712 scope.go:117] "RemoveContainer" containerID="7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.203824 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p85px" event={"ID":"ee6e111a-4814-4ff0-b817-e74814d588a1","Type":"ContainerDied","Data":"ceab8a153b3fab166e1bbcff2d9ad214f8b6324179cdcd3c0601892dfad5b7f5"} Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.219293 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee6e111a-4814-4ff0-b817-e74814d588a1" (UID: "ee6e111a-4814-4ff0-b817-e74814d588a1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.221837 4712 scope.go:117] "RemoveContainer" containerID="52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.244292 4712 scope.go:117] "RemoveContainer" containerID="c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.285984 4712 scope.go:117] "RemoveContainer" containerID="7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88" Jan 31 07:01:00 crc kubenswrapper[4712]: E0131 07:01:00.286568 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88\": container with ID starting with 7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88 not found: ID does not exist" containerID="7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.286611 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88"} err="failed to get container status \"7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88\": rpc error: code = NotFound desc = could not find container \"7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88\": container with ID starting with 7859d7373727d22022cf2c4766664ec65c1d726507e40ac1186773e912b52e88 not found: ID does not exist" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.286639 4712 scope.go:117] "RemoveContainer" containerID="52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb" Jan 31 07:01:00 crc kubenswrapper[4712]: E0131 07:01:00.287038 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb\": container with ID starting with 52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb not found: ID does not exist" containerID="52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.287065 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb"} err="failed to get container status 
\"52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb\": rpc error: code = NotFound desc = could not find container \"52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb\": container with ID starting with 52804948e04b1f9bceb79885bc2e908aa9acbae150c449d2c8fd7184d3199feb not found: ID does not exist" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.287080 4712 scope.go:117] "RemoveContainer" containerID="c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568" Jan 31 07:01:00 crc kubenswrapper[4712]: E0131 07:01:00.287418 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568\": container with ID starting with c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568 not found: ID does not exist" containerID="c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.287515 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568"} err="failed to get container status \"c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568\": rpc error: code = NotFound desc = could not find container \"c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568\": container with ID starting with c51d19ac4e8bcf694f5ff311995a97cff51138aee9136dc890e3624ddfd3a568 not found: ID does not exist" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.294948 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-fernet-keys\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.295002 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-combined-ca-bundle\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.295035 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-config-data\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.295074 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgdrw\" (UniqueName: \"kubernetes.io/projected/d0a12f3a-54d0-4490-9b23-1942bbe8520e-kube-api-access-mgdrw\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.295181 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee6e111a-4814-4ff0-b817-e74814d588a1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.396956 4712 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgdrw\" (UniqueName: \"kubernetes.io/projected/d0a12f3a-54d0-4490-9b23-1942bbe8520e-kube-api-access-mgdrw\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.397287 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-fernet-keys\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.397373 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-combined-ca-bundle\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.397412 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-config-data\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.401459 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-fernet-keys\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.401642 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-config-data\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.402633 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-combined-ca-bundle\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.412288 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgdrw\" (UniqueName: \"kubernetes.io/projected/d0a12f3a-54d0-4490-9b23-1942bbe8520e-kube-api-access-mgdrw\") pod \"keystone-cron-29497381-b6tmf\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.480755 4712 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.612292 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p85px"] Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.634270 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p85px"] Jan 31 07:01:00 crc kubenswrapper[4712]: I0131 07:01:00.976387 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29497381-b6tmf"] Jan 31 07:01:01 crc kubenswrapper[4712]: W0131 07:01:01.386418 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0a12f3a_54d0_4490_9b23_1942bbe8520e.slice/crio-cd3702e3186b7e7c7db82964878edb411e575fc0f06531e7a652163b016255b2 WatchSource:0}: Error finding container cd3702e3186b7e7c7db82964878edb411e575fc0f06531e7a652163b016255b2: Status 404 returned error can't find the container with id cd3702e3186b7e7c7db82964878edb411e575fc0f06531e7a652163b016255b2 Jan 31 07:01:02 crc kubenswrapper[4712]: I0131 07:01:02.240988 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29497381-b6tmf" event={"ID":"d0a12f3a-54d0-4490-9b23-1942bbe8520e","Type":"ContainerStarted","Data":"52aab64dd3c23c0756f62bc28e1fdfa6ab8d2935f1255184edb2475c22a3c4b3"} Jan 31 07:01:02 crc kubenswrapper[4712]: I0131 07:01:02.241455 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29497381-b6tmf" event={"ID":"d0a12f3a-54d0-4490-9b23-1942bbe8520e","Type":"ContainerStarted","Data":"cd3702e3186b7e7c7db82964878edb411e575fc0f06531e7a652163b016255b2"} Jan 31 07:01:02 crc kubenswrapper[4712]: I0131 07:01:02.261584 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29497381-b6tmf" podStartSLOduration=2.261560171 podStartE2EDuration="2.261560171s" podCreationTimestamp="2026-01-31 07:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-31 07:01:02.257124951 +0000 UTC m=+4928.351006792" watchObservedRunningTime="2026-01-31 07:01:02.261560171 +0000 UTC m=+4928.355442012" Jan 31 07:01:02 crc kubenswrapper[4712]: I0131 07:01:02.514881 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee6e111a-4814-4ff0-b817-e74814d588a1" path="/var/lib/kubelet/pods/ee6e111a-4814-4ff0-b817-e74814d588a1/volumes" Jan 31 07:01:05 crc kubenswrapper[4712]: I0131 07:01:05.268485 4712 generic.go:334] "Generic (PLEG): container finished" podID="d0a12f3a-54d0-4490-9b23-1942bbe8520e" containerID="52aab64dd3c23c0756f62bc28e1fdfa6ab8d2935f1255184edb2475c22a3c4b3" exitCode=0 Jan 31 07:01:05 crc kubenswrapper[4712]: I0131 07:01:05.268587 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29497381-b6tmf" event={"ID":"d0a12f3a-54d0-4490-9b23-1942bbe8520e","Type":"ContainerDied","Data":"52aab64dd3c23c0756f62bc28e1fdfa6ab8d2935f1255184edb2475c22a3c4b3"} Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.631562 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.740784 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgdrw\" (UniqueName: \"kubernetes.io/projected/d0a12f3a-54d0-4490-9b23-1942bbe8520e-kube-api-access-mgdrw\") pod \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.740824 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-fernet-keys\") pod \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.740869 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-config-data\") pod \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.741101 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-combined-ca-bundle\") pod \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\" (UID: \"d0a12f3a-54d0-4490-9b23-1942bbe8520e\") " Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.746224 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0a12f3a-54d0-4490-9b23-1942bbe8520e-kube-api-access-mgdrw" (OuterVolumeSpecName: "kube-api-access-mgdrw") pod "d0a12f3a-54d0-4490-9b23-1942bbe8520e" (UID: "d0a12f3a-54d0-4490-9b23-1942bbe8520e"). InnerVolumeSpecName "kube-api-access-mgdrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.746498 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d0a12f3a-54d0-4490-9b23-1942bbe8520e" (UID: "d0a12f3a-54d0-4490-9b23-1942bbe8520e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.767615 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0a12f3a-54d0-4490-9b23-1942bbe8520e" (UID: "d0a12f3a-54d0-4490-9b23-1942bbe8520e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.802269 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-config-data" (OuterVolumeSpecName: "config-data") pod "d0a12f3a-54d0-4490-9b23-1942bbe8520e" (UID: "d0a12f3a-54d0-4490-9b23-1942bbe8520e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.843386 4712 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.843427 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgdrw\" (UniqueName: \"kubernetes.io/projected/d0a12f3a-54d0-4490-9b23-1942bbe8520e-kube-api-access-mgdrw\") on node \"crc\" DevicePath \"\"" Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.843437 4712 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 31 07:01:06 crc kubenswrapper[4712]: I0131 07:01:06.843447 4712 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0a12f3a-54d0-4490-9b23-1942bbe8520e-config-data\") on node \"crc\" DevicePath \"\"" Jan 31 07:01:07 crc kubenswrapper[4712]: I0131 07:01:07.287378 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29497381-b6tmf" event={"ID":"d0a12f3a-54d0-4490-9b23-1942bbe8520e","Type":"ContainerDied","Data":"cd3702e3186b7e7c7db82964878edb411e575fc0f06531e7a652163b016255b2"} Jan 31 07:01:07 crc kubenswrapper[4712]: I0131 07:01:07.287418 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29497381-b6tmf" Jan 31 07:01:07 crc kubenswrapper[4712]: I0131 07:01:07.287425 4712 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd3702e3186b7e7c7db82964878edb411e575fc0f06531e7a652163b016255b2" Jan 31 07:01:12 crc kubenswrapper[4712]: I0131 07:01:12.497800 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 07:01:12 crc kubenswrapper[4712]: I0131 07:01:12.498183 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 07:01:24 crc kubenswrapper[4712]: I0131 07:01:24.978879 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ff848578-hzzpm_913d1fd5-e3ce-4632-abda-a7161638d494/barbican-api/0.log" Jan 31 07:01:25 crc kubenswrapper[4712]: I0131 07:01:25.146255 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-77f65dc848-97sk5_8bac1193-72ba-4208-9e62-9eae63196d1c/barbican-keystone-listener/0.log" Jan 31 07:01:25 crc kubenswrapper[4712]: I0131 07:01:25.195461 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ff848578-hzzpm_913d1fd5-e3ce-4632-abda-a7161638d494/barbican-api-log/0.log" Jan 31 07:01:25 crc kubenswrapper[4712]: I0131 07:01:25.253734 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-77f65dc848-97sk5_8bac1193-72ba-4208-9e62-9eae63196d1c/barbican-keystone-listener-log/0.log" Jan 
31 07:01:25 crc kubenswrapper[4712]: I0131 07:01:25.395523 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5f578d49bf-ffhtj_872694e2-8988-499c-b05b-3597e7d4e327/barbican-worker/0.log" Jan 31 07:01:25 crc kubenswrapper[4712]: I0131 07:01:25.513752 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5f578d49bf-ffhtj_872694e2-8988-499c-b05b-3597e7d4e327/barbican-worker-log/0.log" Jan 31 07:01:25 crc kubenswrapper[4712]: I0131 07:01:25.649014 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-d9662_d61c9236-0514-4d46-b7a7-49f8d5e63685/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:25 crc kubenswrapper[4712]: I0131 07:01:25.774779 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_9e9e028b-835f-498c-a16a-88a444ee2739/ceilometer-central-agent/0.log" Jan 31 07:01:25 crc kubenswrapper[4712]: I0131 07:01:25.836719 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_9e9e028b-835f-498c-a16a-88a444ee2739/ceilometer-notification-agent/0.log" Jan 31 07:01:25 crc kubenswrapper[4712]: I0131 07:01:25.887343 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_9e9e028b-835f-498c-a16a-88a444ee2739/proxy-httpd/0.log" Jan 31 07:01:26 crc kubenswrapper[4712]: I0131 07:01:26.035020 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_9e9e028b-835f-498c-a16a-88a444ee2739/sg-core/0.log" Jan 31 07:01:26 crc kubenswrapper[4712]: I0131 07:01:26.138486 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f1761086-d010-4dcc-91fb-a9503805de81/cinder-api-log/0.log" Jan 31 07:01:26 crc kubenswrapper[4712]: I0131 07:01:26.300940 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f1761086-d010-4dcc-91fb-a9503805de81/cinder-api/0.log" Jan 31 07:01:26 crc kubenswrapper[4712]: I0131 07:01:26.365516 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2118bcf2-8afd-4e35-b53b-6998f5c6a5cc/cinder-scheduler/0.log" Jan 31 07:01:26 crc kubenswrapper[4712]: I0131 07:01:26.406559 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2118bcf2-8afd-4e35-b53b-6998f5c6a5cc/probe/0.log" Jan 31 07:01:26 crc kubenswrapper[4712]: I0131 07:01:26.574489 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-vwxj6_033a2ac5-1d6e-4c75-9792-d54b4da7ef85/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:26 crc kubenswrapper[4712]: I0131 07:01:26.667425 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-jf2lp_8a24ba49-5360-4c66-a06d-36f6915384a9/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:26 crc kubenswrapper[4712]: I0131 07:01:26.908807 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-595f5c6cbf-z979m_848e784a-8596-4d55-bb70-f4a99fd14873/init/0.log" Jan 31 07:01:27 crc kubenswrapper[4712]: I0131 07:01:27.239248 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-595f5c6cbf-z979m_848e784a-8596-4d55-bb70-f4a99fd14873/init/0.log" Jan 31 07:01:27 crc kubenswrapper[4712]: I0131 07:01:27.240643 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-595f5c6cbf-z979m_848e784a-8596-4d55-bb70-f4a99fd14873/dnsmasq-dns/0.log" Jan 31 07:01:27 crc kubenswrapper[4712]: I0131 07:01:27.267325 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-jxq4c_d52a5e5f-6195-4acc-b30b-c872b19bbd10/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:27 crc kubenswrapper[4712]: I0131 07:01:27.472037 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b83454be-e489-429c-a4b3-8914ee18daa4/glance-httpd/0.log" Jan 31 07:01:27 crc kubenswrapper[4712]: I0131 07:01:27.502641 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b83454be-e489-429c-a4b3-8914ee18daa4/glance-log/0.log" Jan 31 07:01:27 crc kubenswrapper[4712]: I0131 07:01:27.680019 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_baae8e5b-9153-449a-92f2-34eb6cb7dbd3/glance-log/0.log" Jan 31 07:01:27 crc kubenswrapper[4712]: I0131 07:01:27.716569 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_baae8e5b-9153-449a-92f2-34eb6cb7dbd3/glance-httpd/0.log" Jan 31 07:01:27 crc kubenswrapper[4712]: I0131 07:01:27.805809 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-t9596_1b9eefb9-d787-4042-b8cf-b1d7160c09a4/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:27 crc kubenswrapper[4712]: I0131 07:01:27.932038 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-f6t8j_a7836756-e240-4f18-b3cc-f820d8dd026d/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:28 crc kubenswrapper[4712]: I0131 07:01:28.203982 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29497381-b6tmf_d0a12f3a-54d0-4490-9b23-1942bbe8520e/keystone-cron/0.log" Jan 31 07:01:28 crc kubenswrapper[4712]: I0131 07:01:28.374764 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-76bf8584d5-c59jx_623c2dc0-c837-436c-ade9-19c8a8fedfb6/keystone-api/0.log" Jan 31 07:01:28 crc kubenswrapper[4712]: I0131 07:01:28.450602 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_2f56d276-e653-4e6b-b5a7-e530babf7175/kube-state-metrics/0.log" Jan 31 07:01:28 crc kubenswrapper[4712]: I0131 07:01:28.569926 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-x74g8_1e610581-bd59-418a-901a-7a37acc85442/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:28 crc kubenswrapper[4712]: I0131 07:01:28.881744 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6445df85d9-7dknt_2b8f18ba-9096-4d08-9d1f-4efed6b7883a/neutron-httpd/0.log" Jan 31 07:01:28 crc kubenswrapper[4712]: I0131 07:01:28.948766 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6445df85d9-7dknt_2b8f18ba-9096-4d08-9d1f-4efed6b7883a/neutron-api/0.log" Jan 31 07:01:28 crc kubenswrapper[4712]: I0131 07:01:28.969713 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_5c874b7f-26e1-436e-9cdc-a440a86b72ec/memcached/0.log" Jan 31 07:01:28 crc kubenswrapper[4712]: I0131 07:01:28.976403 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-kn7lb_104962fa-0d0e-40b4-aacc-94ae160c761d/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:29 crc kubenswrapper[4712]: I0131 07:01:29.462795 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_4c060068-c993-4028-8639-64e08eb08bd4/nova-cell0-conductor-conductor/0.log" Jan 31 07:01:29 crc kubenswrapper[4712]: I0131 07:01:29.569784 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e2c19455-9dc4-45de-afd3-d55b91c729c6/nova-api-log/0.log" Jan 31 07:01:29 crc kubenswrapper[4712]: I0131 07:01:29.687546 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_7193b7ce-20b7-49f6-a5fc-f48cfbfc51f7/nova-cell1-conductor-conductor/0.log" Jan 31 07:01:29 crc kubenswrapper[4712]: I0131 07:01:29.879261 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_ab75dbe3-c922-4440-b310-6fe0d2201274/nova-cell1-novncproxy-novncproxy/0.log" Jan 31 07:01:30 crc kubenswrapper[4712]: I0131 07:01:30.011488 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e2c19455-9dc4-45de-afd3-d55b91c729c6/nova-api-api/0.log" Jan 31 07:01:30 crc kubenswrapper[4712]: I0131 07:01:30.164823 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-9fpgv_293b2cc8-393a-4043-ac70-89b0a519de4b/nova-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:30 crc kubenswrapper[4712]: I0131 07:01:30.292505 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c032ae9d-cfbd-4184-8abb-5ccb6e158a0c/nova-metadata-log/0.log" Jan 31 07:01:30 crc kubenswrapper[4712]: I0131 07:01:30.605622 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9029816c-730b-4d38-9464-1d0ed936fc10/mysql-bootstrap/0.log" Jan 31 07:01:30 crc kubenswrapper[4712]: I0131 07:01:30.608950 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_6d55a0e1-ea9a-4236-b89d-3b09b6f6dce9/nova-scheduler-scheduler/0.log" Jan 31 07:01:30 crc kubenswrapper[4712]: I0131 07:01:30.821215 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9029816c-730b-4d38-9464-1d0ed936fc10/mysql-bootstrap/0.log" Jan 31 07:01:30 crc kubenswrapper[4712]: I0131 07:01:30.855426 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c54792d1-2de9-4c85-a843-35d4b14dd8e4/mysql-bootstrap/0.log" Jan 31 07:01:30 crc kubenswrapper[4712]: I0131 07:01:30.899964 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9029816c-730b-4d38-9464-1d0ed936fc10/galera/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.143377 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_5403021d-e6d4-4e4e-aa8e-8879f65f9f36/openstackclient/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.153200 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c54792d1-2de9-4c85-a843-35d4b14dd8e4/mysql-bootstrap/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.176386 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c54792d1-2de9-4c85-a843-35d4b14dd8e4/galera/0.log" Jan 31 07:01:31 crc 
kubenswrapper[4712]: I0131 07:01:31.417065 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xzbjj_222305d6-dde8-43bd-801c-7420d0a05add/ovsdb-server-init/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.484105 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-wxrfr_cdd7091c-c446-44de-a591-89bcd4901347/openstack-network-exporter/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.602895 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_c032ae9d-cfbd-4184-8abb-5ccb6e158a0c/nova-metadata-metadata/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.740195 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xzbjj_222305d6-dde8-43bd-801c-7420d0a05add/ovs-vswitchd/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.761743 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xzbjj_222305d6-dde8-43bd-801c-7420d0a05add/ovsdb-server-init/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.797030 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-xzbjj_222305d6-dde8-43bd-801c-7420d0a05add/ovsdb-server/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.837181 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-xbh95_16fc3ee9-9ecc-45b7-8410-d9a6b2da5863/ovn-controller/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.971253 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-2lsmd_66f934d4-4354-4d23-80e4-0fd0b6facf41/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:31 crc kubenswrapper[4712]: I0131 07:01:31.993276 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_2971f42d-daa7-474b-8d5e-06f5d943d091/openstack-network-exporter/0.log" Jan 31 07:01:32 crc kubenswrapper[4712]: I0131 07:01:32.048961 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_2971f42d-daa7-474b-8d5e-06f5d943d091/ovn-northd/0.log" Jan 31 07:01:32 crc kubenswrapper[4712]: I0131 07:01:32.194481 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3eb28efb-abfd-4570-8282-f0189e523fa3/ovsdbserver-nb/0.log" Jan 31 07:01:32 crc kubenswrapper[4712]: I0131 07:01:32.196947 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3eb28efb-abfd-4570-8282-f0189e523fa3/openstack-network-exporter/0.log" Jan 31 07:01:32 crc kubenswrapper[4712]: I0131 07:01:32.244361 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_58cd6591-0ba3-4102-b8de-79b3c7d77f8e/openstack-network-exporter/0.log" Jan 31 07:01:32 crc kubenswrapper[4712]: I0131 07:01:32.379715 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_58cd6591-0ba3-4102-b8de-79b3c7d77f8e/ovsdbserver-sb/0.log" Jan 31 07:01:32 crc kubenswrapper[4712]: I0131 07:01:32.505989 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-778dc5b584-k7lvt_c1366fba-a1ea-415f-8b63-43648b6b16fb/placement-api/0.log" Jan 31 07:01:32 crc kubenswrapper[4712]: I0131 07:01:32.526926 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-778dc5b584-k7lvt_c1366fba-a1ea-415f-8b63-43648b6b16fb/placement-log/0.log" 
Jan 31 07:01:32 crc kubenswrapper[4712]: I0131 07:01:32.628001 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_dc6391b1-c3f6-4ae8-ad8f-00572ac27b87/setup-container/0.log" Jan 31 07:01:33 crc kubenswrapper[4712]: I0131 07:01:33.574512 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_dc6391b1-c3f6-4ae8-ad8f-00572ac27b87/rabbitmq/0.log" Jan 31 07:01:33 crc kubenswrapper[4712]: I0131 07:01:33.601454 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_dc6391b1-c3f6-4ae8-ad8f-00572ac27b87/setup-container/0.log" Jan 31 07:01:33 crc kubenswrapper[4712]: I0131 07:01:33.610815 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_57a64e6d-ff8a-480a-aa16-563b5b127e6f/setup-container/0.log" Jan 31 07:01:33 crc kubenswrapper[4712]: I0131 07:01:33.775035 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_57a64e6d-ff8a-480a-aa16-563b5b127e6f/setup-container/0.log" Jan 31 07:01:33 crc kubenswrapper[4712]: I0131 07:01:33.786701 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_57a64e6d-ff8a-480a-aa16-563b5b127e6f/rabbitmq/0.log" Jan 31 07:01:33 crc kubenswrapper[4712]: I0131 07:01:33.868498 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-wvfhm_8e2c8610-b420-4018-b0d3-62afdc779dba/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.018630 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-drvv8_76c5b162-1ced-457b-90f1-fbf85edf746d/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.091636 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-4n2rl_b7519335-f8e2-4211-8b99-a9fc3ac51150/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.220513 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-w4v27_ef660f01-216f-4f2e-89b8-55e0fb24c506/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.304267 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-cd9mf_d0cbadc8-9972-41d6-9313-0337cb84f72d/ssh-known-hosts-edpm-deployment/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.480730 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8557fb7df9-bhzkt_2a8507c3-4b91-4b81-83ba-4bb63b3745f0/proxy-server/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.579849 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8557fb7df9-bhzkt_2a8507c3-4b91-4b81-83ba-4bb63b3745f0/proxy-httpd/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.590338 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-xnfzc_108e0e4f-9137-4e8c-aec6-032c1585852c/swift-ring-rebalance/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.746382 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/account-auditor/0.log" Jan 31 07:01:34 
crc kubenswrapper[4712]: I0131 07:01:34.824010 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/account-replicator/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.841677 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/account-reaper/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.853562 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/container-auditor/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.883061 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/account-server/0.log" Jan 31 07:01:34 crc kubenswrapper[4712]: I0131 07:01:34.966245 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/container-replicator/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.442881 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/container-server/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.491778 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-expirer/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.494128 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/container-updater/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.511999 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-auditor/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.556601 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-replicator/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.675525 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-server/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.686457 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/rsync/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.702335 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/swift-recon-cron/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.715350 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_526325aa-f517-45ab-b0d3-b7285ef8db7b/object-updater/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.914692 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_0f27224a-407b-4803-afd6-9c1caa3fbfdf/tempest-tests-tempest-tests-runner/0.log" Jan 31 07:01:35 crc kubenswrapper[4712]: I0131 07:01:35.914941 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-b5dpf_467fe339-07fd-4c51-95df-4c8c123e2c03/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:36 crc kubenswrapper[4712]: I0131 07:01:36.029498 4712 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_acea85f9-50c3-4f55-8928-dbcf70e29709/test-operator-logs-container/0.log" Jan 31 07:01:36 crc kubenswrapper[4712]: I0131 07:01:36.130839 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-b462n_55c71843-8f9f-4f1b-904d-e05dc4a2ea25/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.563081 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ttt75"] Jan 31 07:01:40 crc kubenswrapper[4712]: E0131 07:01:40.564220 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0a12f3a-54d0-4490-9b23-1942bbe8520e" containerName="keystone-cron" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.564237 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0a12f3a-54d0-4490-9b23-1942bbe8520e" containerName="keystone-cron" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.564441 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0a12f3a-54d0-4490-9b23-1942bbe8520e" containerName="keystone-cron" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.568116 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.578091 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ttt75"] Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.623376 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khjq2\" (UniqueName: \"kubernetes.io/projected/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-kube-api-access-khjq2\") pod \"redhat-operators-ttt75\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.623732 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-utilities\") pod \"redhat-operators-ttt75\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.623778 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-catalog-content\") pod \"redhat-operators-ttt75\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.725636 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khjq2\" (UniqueName: \"kubernetes.io/projected/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-kube-api-access-khjq2\") pod \"redhat-operators-ttt75\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.725864 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-utilities\") pod \"redhat-operators-ttt75\" (UID: 
\"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.725904 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-catalog-content\") pod \"redhat-operators-ttt75\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.726489 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-utilities\") pod \"redhat-operators-ttt75\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.726537 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-catalog-content\") pod \"redhat-operators-ttt75\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.745872 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khjq2\" (UniqueName: \"kubernetes.io/projected/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-kube-api-access-khjq2\") pod \"redhat-operators-ttt75\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:40 crc kubenswrapper[4712]: I0131 07:01:40.894921 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:41 crc kubenswrapper[4712]: I0131 07:01:41.429194 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ttt75"] Jan 31 07:01:41 crc kubenswrapper[4712]: I0131 07:01:41.586750 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ttt75" event={"ID":"6fc865d5-a3ff-4905-90e4-21ec5f425e0b","Type":"ContainerStarted","Data":"1bdbafaa74893405eda41da2f73e23a2e68af7543feee9aa897020f547c6e6d5"} Jan 31 07:01:42 crc kubenswrapper[4712]: I0131 07:01:42.497560 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 07:01:42 crc kubenswrapper[4712]: I0131 07:01:42.497635 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 07:01:42 crc kubenswrapper[4712]: I0131 07:01:42.597216 4712 generic.go:334] "Generic (PLEG): container finished" podID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerID="85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675" exitCode=0 Jan 31 07:01:42 crc kubenswrapper[4712]: I0131 07:01:42.597269 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ttt75" 
event={"ID":"6fc865d5-a3ff-4905-90e4-21ec5f425e0b","Type":"ContainerDied","Data":"85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675"} Jan 31 07:01:43 crc kubenswrapper[4712]: I0131 07:01:43.613276 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ttt75" event={"ID":"6fc865d5-a3ff-4905-90e4-21ec5f425e0b","Type":"ContainerStarted","Data":"a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed"} Jan 31 07:01:44 crc kubenswrapper[4712]: I0131 07:01:44.623049 4712 generic.go:334] "Generic (PLEG): container finished" podID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerID="a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed" exitCode=0 Jan 31 07:01:44 crc kubenswrapper[4712]: I0131 07:01:44.623134 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ttt75" event={"ID":"6fc865d5-a3ff-4905-90e4-21ec5f425e0b","Type":"ContainerDied","Data":"a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed"} Jan 31 07:01:45 crc kubenswrapper[4712]: I0131 07:01:45.633691 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ttt75" event={"ID":"6fc865d5-a3ff-4905-90e4-21ec5f425e0b","Type":"ContainerStarted","Data":"087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea"} Jan 31 07:01:45 crc kubenswrapper[4712]: I0131 07:01:45.651128 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ttt75" podStartSLOduration=3.126064647 podStartE2EDuration="5.651108934s" podCreationTimestamp="2026-01-31 07:01:40 +0000 UTC" firstStartedPulling="2026-01-31 07:01:42.59958611 +0000 UTC m=+4968.693467961" lastFinishedPulling="2026-01-31 07:01:45.124630407 +0000 UTC m=+4971.218512248" observedRunningTime="2026-01-31 07:01:45.649209857 +0000 UTC m=+4971.743091698" watchObservedRunningTime="2026-01-31 07:01:45.651108934 +0000 UTC m=+4971.744990775" Jan 31 07:01:50 crc kubenswrapper[4712]: I0131 07:01:50.897187 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:50 crc kubenswrapper[4712]: I0131 07:01:50.897805 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:01:51 crc kubenswrapper[4712]: I0131 07:01:51.943875 4712 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ttt75" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerName="registry-server" probeResult="failure" output=< Jan 31 07:01:51 crc kubenswrapper[4712]: timeout: failed to connect service ":50051" within 1s Jan 31 07:01:51 crc kubenswrapper[4712]: > Jan 31 07:02:00 crc kubenswrapper[4712]: I0131 07:02:00.947346 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:02:01 crc kubenswrapper[4712]: I0131 07:02:01.013468 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:02:01 crc kubenswrapper[4712]: I0131 07:02:01.226714 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ttt75"] Jan 31 07:02:01 crc kubenswrapper[4712]: I0131 07:02:01.720756 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/util/0.log" Jan 31 07:02:01 crc kubenswrapper[4712]: I0131 07:02:01.923899 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/util/0.log" Jan 31 07:02:01 crc kubenswrapper[4712]: I0131 07:02:01.963102 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/pull/0.log" Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.020829 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/pull/0.log" Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.173824 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/util/0.log" Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.205545 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/extract/0.log" Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.242979 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b6f0ceecaefb29a8e5801b760101e31cf6295f8d10236ea1e93fc043d1hkmdj_99a223e0-dd1b-4f6c-b531-4e7b523468bd/pull/0.log" Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.476853 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b6c4d8c5f-wz82s_e8223e38-a5ce-4f9f-9780-dea80a326f17/manager/0.log" Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.528199 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d874c8fc-74szb_c0a2fda3-cecc-40e9-b15e-2d95487c7373/manager/0.log" Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.642154 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d9697b7f4-vmvmz_e48bf123-5e17-4ef3-980f-92286c95bd85/manager/0.log" Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.795508 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ttt75" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerName="registry-server" containerID="cri-o://087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea" gracePeriod=2 Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.809357 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8886f4c47-z96q2_b348e7d3-8e8d-484d-bd03-b27125c4fd58/manager/0.log" Jan 31 07:02:02 crc kubenswrapper[4712]: I0131 07:02:02.900682 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69d6db494d-ktrfv_623f0661-5fd0-4c1c-94b8-7cb41dc60f5f/manager/0.log" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.057668 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-7x6wg_f94c7f2e-7429-4be0-bad9-f3cdf0156ba9/manager/0.log" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.351649 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.496205 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khjq2\" (UniqueName: \"kubernetes.io/projected/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-kube-api-access-khjq2\") pod \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.496264 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-utilities\") pod \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.496481 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-catalog-content\") pod \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\" (UID: \"6fc865d5-a3ff-4905-90e4-21ec5f425e0b\") " Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.497764 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-utilities" (OuterVolumeSpecName: "utilities") pod "6fc865d5-a3ff-4905-90e4-21ec5f425e0b" (UID: "6fc865d5-a3ff-4905-90e4-21ec5f425e0b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.515511 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-kube-api-access-khjq2" (OuterVolumeSpecName: "kube-api-access-khjq2") pod "6fc865d5-a3ff-4905-90e4-21ec5f425e0b" (UID: "6fc865d5-a3ff-4905-90e4-21ec5f425e0b"). InnerVolumeSpecName "kube-api-access-khjq2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.566638 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5f4b8bd54d-9djc7_d7b48b1c-633e-4714-a9fe-0cdb81dc946d/manager/0.log" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.599554 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khjq2\" (UniqueName: \"kubernetes.io/projected/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-kube-api-access-khjq2\") on node \"crc\" DevicePath \"\"" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.599821 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-utilities\") on node \"crc\" DevicePath \"\"" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.615124 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-pdzn7_d0f0cc18-6437-4c23-8ebd-f0a234fc72ff/manager/0.log" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.679314 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6fc865d5-a3ff-4905-90e4-21ec5f425e0b" (UID: "6fc865d5-a3ff-4905-90e4-21ec5f425e0b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.702118 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc865d5-a3ff-4905-90e4-21ec5f425e0b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.808366 4712 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ttt75" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.808397 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ttt75" event={"ID":"6fc865d5-a3ff-4905-90e4-21ec5f425e0b","Type":"ContainerDied","Data":"087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea"} Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.808460 4712 scope.go:117] "RemoveContainer" containerID="087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.808248 4712 generic.go:334] "Generic (PLEG): container finished" podID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerID="087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea" exitCode=0 Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.808981 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ttt75" event={"ID":"6fc865d5-a3ff-4905-90e4-21ec5f425e0b","Type":"ContainerDied","Data":"1bdbafaa74893405eda41da2f73e23a2e68af7543feee9aa897020f547c6e6d5"} Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.832265 4712 scope.go:117] "RemoveContainer" containerID="a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.859745 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ttt75"] Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.867632 4712 scope.go:117] "RemoveContainer" containerID="85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.869982 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-84f48565d4-2zprf_b81b1954-214b-40b7-886d-3da110000383/manager/0.log" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.880543 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ttt75"] Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.921453 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7dd968899f-n9768_ddfcf8fb-8920-44fa-a439-ea5d5b6456f4/manager/0.log" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.938397 4712 scope.go:117] "RemoveContainer" containerID="087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea" Jan 31 07:02:03 crc kubenswrapper[4712]: E0131 07:02:03.939431 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea\": container with ID starting with 087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea not found: ID does not exist" containerID="087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.939478 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea"} err="failed to get container status \"087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea\": rpc error: code = NotFound desc = could not find container \"087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea\": container with ID starting with 
087160a31b36a138852f2f7ccee10dfd0824549b08da7a497adae4af7db2fbea not found: ID does not exist" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.939510 4712 scope.go:117] "RemoveContainer" containerID="a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed" Jan 31 07:02:03 crc kubenswrapper[4712]: E0131 07:02:03.940223 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed\": container with ID starting with a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed not found: ID does not exist" containerID="a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.940252 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed"} err="failed to get container status \"a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed\": rpc error: code = NotFound desc = could not find container \"a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed\": container with ID starting with a89c700744bd18c2a4135d8f86ec55c2f67591730881d75ccfff59152714f0ed not found: ID does not exist" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.940274 4712 scope.go:117] "RemoveContainer" containerID="85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675" Jan 31 07:02:03 crc kubenswrapper[4712]: E0131 07:02:03.940521 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675\": container with ID starting with 85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675 not found: ID does not exist" containerID="85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675" Jan 31 07:02:03 crc kubenswrapper[4712]: I0131 07:02:03.940555 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675"} err="failed to get container status \"85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675\": rpc error: code = NotFound desc = could not find container \"85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675\": container with ID starting with 85f4bacbf8541562fa133924ebe8364673c69ffcc3ca5b50602bbf303ecdd675 not found: ID does not exist" Jan 31 07:02:04 crc kubenswrapper[4712]: I0131 07:02:04.116337 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-czjfw_2a9eefe0-c80b-479c-a630-4b94bea52b20/manager/0.log" Jan 31 07:02:04 crc kubenswrapper[4712]: I0131 07:02:04.196451 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-585dbc889-5vrvb_f7c522e9-e789-4fa5-9736-b6d921eba9e5/manager/0.log" Jan 31 07:02:04 crc kubenswrapper[4712]: I0131 07:02:04.380249 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6687f8d877-f9sm2_76f477c9-248d-45e0-acdc-098fd960378c/manager/0.log" Jan 31 07:02:04 crc kubenswrapper[4712]: I0131 07:02:04.417916 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_nova-operator-controller-manager-55bff696bd-qwh9v_46eafe76-b842-4889-98b5-eae45c6c9a70/manager/0.log" Jan 31 07:02:04 crc kubenswrapper[4712]: I0131 07:02:04.515564 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" path="/var/lib/kubelet/pods/6fc865d5-a3ff-4905-90e4-21ec5f425e0b/volumes" Jan 31 07:02:04 crc kubenswrapper[4712]: I0131 07:02:04.615772 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-86dfb79cc789484_cb20e0cd-448c-4aaf-b20a-e4dfc0efc8cd/manager/0.log" Jan 31 07:02:04 crc kubenswrapper[4712]: I0131 07:02:04.863927 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-757f46c65d-qrpzj_23a3b35d-3b93-4e18-b4af-665b780f3580/operator/0.log" Jan 31 07:02:05 crc kubenswrapper[4712]: I0131 07:02:05.163268 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-zzwgm_f74d2a07-f376-41b5-b5c3-9305ad3a03fb/registry-server/0.log" Jan 31 07:02:05 crc kubenswrapper[4712]: I0131 07:02:05.343923 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-2fg8n_0bbe4bde-a8ba-4a7e-a5b3-3ccec56ef89a/manager/0.log" Jan 31 07:02:05 crc kubenswrapper[4712]: I0131 07:02:05.482986 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-pfsg7_df910b52-e35d-4099-abe9-676b2863ee90/manager/0.log" Jan 31 07:02:05 crc kubenswrapper[4712]: I0131 07:02:05.736933 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-nr98t_4e35eb3f-d0de-469e-b54d-9fbe9fe8ae2a/operator/0.log" Jan 31 07:02:05 crc kubenswrapper[4712]: I0131 07:02:05.875643 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68fc8c869-kcq2k_ab883058-7fba-4506-8493-a1c290b67a44/manager/0.log" Jan 31 07:02:06 crc kubenswrapper[4712]: I0131 07:02:06.031873 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-64b5b76f97-msxfl_a0b07b7e-8267-4062-8cf1-9319d4258d13/manager/0.log" Jan 31 07:02:06 crc kubenswrapper[4712]: I0131 07:02:06.071758 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6b6f655c79-fdkzc_84f27435-bc45-4501-8dda-59f399689054/manager/0.log" Jan 31 07:02:06 crc kubenswrapper[4712]: I0131 07:02:06.215339 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-2wkdr_6d0bc1fd-d786-402b-a7b0-4f31066900f9/manager/0.log" Jan 31 07:02:06 crc kubenswrapper[4712]: I0131 07:02:06.254512 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-p7ht5_43cff116-70bd-4e43-a6a2-cffaebca6f11/manager/0.log" Jan 31 07:02:12 crc kubenswrapper[4712]: I0131 07:02:12.497568 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 07:02:12 crc kubenswrapper[4712]: 
I0131 07:02:12.498261 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 07:02:12 crc kubenswrapper[4712]: I0131 07:02:12.498314 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 07:02:12 crc kubenswrapper[4712]: I0131 07:02:12.499193 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"976e9973eae0d31adf73d583b4df4b5fb1b138f46b2f6dc612fbba0ffc5a3e25"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 07:02:12 crc kubenswrapper[4712]: I0131 07:02:12.499274 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://976e9973eae0d31adf73d583b4df4b5fb1b138f46b2f6dc612fbba0ffc5a3e25" gracePeriod=600 Jan 31 07:02:12 crc kubenswrapper[4712]: I0131 07:02:12.886774 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="976e9973eae0d31adf73d583b4df4b5fb1b138f46b2f6dc612fbba0ffc5a3e25" exitCode=0 Jan 31 07:02:12 crc kubenswrapper[4712]: I0131 07:02:12.886857 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"976e9973eae0d31adf73d583b4df4b5fb1b138f46b2f6dc612fbba0ffc5a3e25"} Jan 31 07:02:12 crc kubenswrapper[4712]: I0131 07:02:12.887133 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"} Jan 31 07:02:12 crc kubenswrapper[4712]: I0131 07:02:12.887161 4712 scope.go:117] "RemoveContainer" containerID="51531154051b7974c76c4ebba7c24f14e9a09559aa1c3dfff28acbc8967632a2" Jan 31 07:02:29 crc kubenswrapper[4712]: I0131 07:02:29.337381 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-qdzvl_dc0eb91d-cc30-4ef8-aa0b-be90744ba313/control-plane-machine-set-operator/0.log" Jan 31 07:02:29 crc kubenswrapper[4712]: I0131 07:02:29.366700 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-h8q6s_71d0663f-6a96-43ff-91fe-25bf58eb996e/kube-rbac-proxy/0.log" Jan 31 07:02:29 crc kubenswrapper[4712]: I0131 07:02:29.528358 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-h8q6s_71d0663f-6a96-43ff-91fe-25bf58eb996e/machine-api-operator/0.log" Jan 31 07:02:43 crc kubenswrapper[4712]: I0131 07:02:43.013339 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-rzlv8_d6da4569-9d12-457b-b448-2a96889fd6d0/cert-manager-controller/0.log" Jan 31 07:02:43 crc kubenswrapper[4712]: 
I0131 07:02:43.210132 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-m4x85_8a4a4023-5949-4fb1-b75d-375705a6ccd5/cert-manager-cainjector/0.log" Jan 31 07:02:43 crc kubenswrapper[4712]: I0131 07:02:43.270647 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-cq69c_7a3564dd-edd4-4e23-b910-084279771f4a/cert-manager-webhook/0.log" Jan 31 07:02:56 crc kubenswrapper[4712]: I0131 07:02:56.976668 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-pjjc5_59d0e8e3-1a37-42cd-a1b3-175f6a5fe8a4/nmstate-console-plugin/0.log" Jan 31 07:02:57 crc kubenswrapper[4712]: I0131 07:02:57.171458 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-44b4c_f53db0c4-e85d-4db9-b1c6-12bb90a2d886/nmstate-handler/0.log" Jan 31 07:02:57 crc kubenswrapper[4712]: I0131 07:02:57.308728 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-nnmp4_f5cd590c-d8eb-429e-8023-0a0b981d2437/nmstate-metrics/0.log" Jan 31 07:02:57 crc kubenswrapper[4712]: I0131 07:02:57.309066 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-nnmp4_f5cd590c-d8eb-429e-8023-0a0b981d2437/kube-rbac-proxy/0.log" Jan 31 07:02:57 crc kubenswrapper[4712]: I0131 07:02:57.452887 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-tp5m4_9f81c039-533b-498a-8958-8b217806c189/nmstate-operator/0.log" Jan 31 07:02:57 crc kubenswrapper[4712]: I0131 07:02:57.544630 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-ftd75_737942ad-fc1c-4559-a40a-e772801f3da4/nmstate-webhook/0.log" Jan 31 07:03:26 crc kubenswrapper[4712]: I0131 07:03:26.923269 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-pwnsk_693f3580-e3b0-4892-a4be-1be046ccd732/kube-rbac-proxy/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.105396 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-pwnsk_693f3580-e3b0-4892-a4be-1be046ccd732/controller/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.139310 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-frr-files/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.343347 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-reloader/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.385580 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-metrics/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.412209 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-frr-files/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.479305 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-reloader/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.573012 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-frr-files/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.616929 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-metrics/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.617277 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-reloader/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.658644 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-metrics/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.841279 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-frr-files/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.866069 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/controller/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.879339 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-reloader/0.log" Jan 31 07:03:27 crc kubenswrapper[4712]: I0131 07:03:27.904051 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/cp-metrics/0.log" Jan 31 07:03:28 crc kubenswrapper[4712]: I0131 07:03:28.052869 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/frr-metrics/0.log" Jan 31 07:03:28 crc kubenswrapper[4712]: I0131 07:03:28.058580 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/kube-rbac-proxy/0.log" Jan 31 07:03:28 crc kubenswrapper[4712]: I0131 07:03:28.158355 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/kube-rbac-proxy-frr/0.log" Jan 31 07:03:28 crc kubenswrapper[4712]: I0131 07:03:28.278545 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/reloader/0.log" Jan 31 07:03:28 crc kubenswrapper[4712]: I0131 07:03:28.385546 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-79qrg_4171e791-18bd-4302-933e-e49a8ad12e63/frr-k8s-webhook-server/0.log" Jan 31 07:03:28 crc kubenswrapper[4712]: I0131 07:03:28.557994 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-f5c7f4d69-nvgqw_28a2f231-88ed-4f0d-941b-aa351dcabfd8/manager/0.log" Jan 31 07:03:28 crc kubenswrapper[4712]: I0131 07:03:28.764011 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6d4f9bc9b4-stlsv_0237140f-4dc3-42f6-8621-f96e8732af5e/webhook-server/0.log" Jan 31 07:03:28 crc kubenswrapper[4712]: I0131 07:03:28.893990 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-j9fwd_5a9228a1-741f-49a7-8e70-5f2079f89755/kube-rbac-proxy/0.log" Jan 31 07:03:29 crc kubenswrapper[4712]: I0131 07:03:29.459872 4712 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-j9fwd_5a9228a1-741f-49a7-8e70-5f2079f89755/speaker/0.log" Jan 31 07:03:29 crc kubenswrapper[4712]: I0131 07:03:29.679042 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7xh8p_cb060e1c-3daf-49df-9e19-da31fc5b719c/frr/0.log" Jan 31 07:03:42 crc kubenswrapper[4712]: I0131 07:03:42.750420 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/util/0.log" Jan 31 07:03:42 crc kubenswrapper[4712]: I0131 07:03:42.943579 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/pull/0.log" Jan 31 07:03:42 crc kubenswrapper[4712]: I0131 07:03:42.958414 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/pull/0.log" Jan 31 07:03:42 crc kubenswrapper[4712]: I0131 07:03:42.965860 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/util/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.133684 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/util/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.142615 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/extract/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.149086 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc9nlxq_ffe754a1-dc64-42e6-a072-d949b564a821/pull/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.293703 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/util/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.480868 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/pull/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.489835 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/util/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.494560 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/pull/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.700328 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/util/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.719783 4712 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/pull/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.740255 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7136w5qx_dd700cd6-556a-4331-b411-5551352a6b8f/extract/0.log" Jan 31 07:03:43 crc kubenswrapper[4712]: I0131 07:03:43.912092 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-utilities/0.log" Jan 31 07:03:44 crc kubenswrapper[4712]: I0131 07:03:44.768362 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-content/0.log" Jan 31 07:03:44 crc kubenswrapper[4712]: I0131 07:03:44.790671 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-content/0.log" Jan 31 07:03:44 crc kubenswrapper[4712]: I0131 07:03:44.813866 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-utilities/0.log" Jan 31 07:03:45 crc kubenswrapper[4712]: I0131 07:03:45.034491 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-utilities/0.log" Jan 31 07:03:45 crc kubenswrapper[4712]: I0131 07:03:45.038334 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/extract-content/0.log" Jan 31 07:03:45 crc kubenswrapper[4712]: I0131 07:03:45.264731 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-utilities/0.log" Jan 31 07:03:45 crc kubenswrapper[4712]: I0131 07:03:45.538656 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-utilities/0.log" Jan 31 07:03:45 crc kubenswrapper[4712]: I0131 07:03:45.609819 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-content/0.log" Jan 31 07:03:45 crc kubenswrapper[4712]: I0131 07:03:45.640640 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-content/0.log" Jan 31 07:03:45 crc kubenswrapper[4712]: I0131 07:03:45.719630 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gwwfc_61c05007-0401-49ee-a678-0b94d67b2d5e/registry-server/0.log" Jan 31 07:03:45 crc kubenswrapper[4712]: I0131 07:03:45.839553 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-utilities/0.log" Jan 31 07:03:45 crc kubenswrapper[4712]: I0131 07:03:45.841677 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/extract-content/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 
07:03:46.133454 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-jp69c_d31cb490-ecb9-4f62-8633-a6239f98d3a2/marketplace-operator/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.251820 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-utilities/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.403926 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v94kc_08bb8992-f20d-4480-9126-1793aa64b210/registry-server/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.440932 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-content/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.510689 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-content/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.520771 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-utilities/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.716824 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-utilities/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.740913 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/extract-content/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.746631 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-utilities/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.932055 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-utilities/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.936264 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qqkm2_f2b022ce-dad8-4c4c-a30b-4e2cbd06553a/registry-server/0.log" Jan 31 07:03:46 crc kubenswrapper[4712]: I0131 07:03:46.989882 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-content/0.log" Jan 31 07:03:47 crc kubenswrapper[4712]: I0131 07:03:47.014980 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-content/0.log" Jan 31 07:03:47 crc kubenswrapper[4712]: I0131 07:03:47.133926 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-utilities/0.log" Jan 31 07:03:47 crc kubenswrapper[4712]: I0131 07:03:47.141494 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/extract-content/0.log" Jan 31 07:03:47 crc kubenswrapper[4712]: I0131 07:03:47.784733 4712 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vtp6q_8a6d2146-9c2b-4913-905f-ed52fb1ac076/registry-server/0.log" Jan 31 07:04:12 crc kubenswrapper[4712]: I0131 07:04:12.497047 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 07:04:12 crc kubenswrapper[4712]: I0131 07:04:12.497575 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 07:04:42 crc kubenswrapper[4712]: I0131 07:04:42.497602 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 07:04:42 crc kubenswrapper[4712]: I0131 07:04:42.498468 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 07:05:12 crc kubenswrapper[4712]: I0131 07:05:12.496996 4712 patch_prober.go:28] interesting pod/machine-config-daemon-6hwmd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 31 07:05:12 crc kubenswrapper[4712]: I0131 07:05:12.497754 4712 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 31 07:05:12 crc kubenswrapper[4712]: I0131 07:05:12.497797 4712 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" Jan 31 07:05:12 crc kubenswrapper[4712]: I0131 07:05:12.498585 4712 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"} pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 31 07:05:12 crc kubenswrapper[4712]: I0131 07:05:12.498654 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerName="machine-config-daemon" containerID="cri-o://247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" gracePeriod=600 Jan 31 07:05:12 crc kubenswrapper[4712]: E0131 07:05:12.635449 4712 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:05:13 crc kubenswrapper[4712]: I0131 07:05:13.577344 4712 generic.go:334] "Generic (PLEG): container finished" podID="eaac0246-673f-4670-8b7b-c27ecaf0d847" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" exitCode=0 Jan 31 07:05:13 crc kubenswrapper[4712]: I0131 07:05:13.577530 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerDied","Data":"247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"} Jan 31 07:05:13 crc kubenswrapper[4712]: I0131 07:05:13.577714 4712 scope.go:117] "RemoveContainer" containerID="976e9973eae0d31adf73d583b4df4b5fb1b138f46b2f6dc612fbba0ffc5a3e25" Jan 31 07:05:13 crc kubenswrapper[4712]: I0131 07:05:13.578773 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:05:13 crc kubenswrapper[4712]: E0131 07:05:13.579318 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:05:26 crc kubenswrapper[4712]: I0131 07:05:26.505037 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:05:26 crc kubenswrapper[4712]: E0131 07:05:26.506282 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:05:39 crc kubenswrapper[4712]: I0131 07:05:39.504365 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:05:39 crc kubenswrapper[4712]: E0131 07:05:39.505365 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:05:40 crc kubenswrapper[4712]: I0131 07:05:40.831902 4712 generic.go:334] "Generic (PLEG): container finished" podID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerID="a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0" exitCode=0 Jan 31 07:05:40 crc kubenswrapper[4712]: I0131 07:05:40.831996 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-must-gather-5mfnn/must-gather-s58sh" event={"ID":"faddc91f-ad86-4122-9e6c-72fd3d9abf3c","Type":"ContainerDied","Data":"a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0"} Jan 31 07:05:40 crc kubenswrapper[4712]: I0131 07:05:40.832940 4712 scope.go:117] "RemoveContainer" containerID="a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0" Jan 31 07:05:41 crc kubenswrapper[4712]: I0131 07:05:41.300803 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5mfnn_must-gather-s58sh_faddc91f-ad86-4122-9e6c-72fd3d9abf3c/gather/0.log" Jan 31 07:05:51 crc kubenswrapper[4712]: I0131 07:05:51.824374 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5mfnn/must-gather-s58sh"] Jan 31 07:05:51 crc kubenswrapper[4712]: I0131 07:05:51.825420 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-5mfnn/must-gather-s58sh" podUID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerName="copy" containerID="cri-o://b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d" gracePeriod=2 Jan 31 07:05:51 crc kubenswrapper[4712]: I0131 07:05:51.845669 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5mfnn/must-gather-s58sh"] Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.263021 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5mfnn_must-gather-s58sh_faddc91f-ad86-4122-9e6c-72fd3d9abf3c/copy/0.log" Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.263688 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5mfnn/must-gather-s58sh" Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.352346 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-must-gather-output\") pod \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\" (UID: \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\") " Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.352451 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjbf4\" (UniqueName: \"kubernetes.io/projected/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-kube-api-access-wjbf4\") pod \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\" (UID: \"faddc91f-ad86-4122-9e6c-72fd3d9abf3c\") " Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.358660 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-kube-api-access-wjbf4" (OuterVolumeSpecName: "kube-api-access-wjbf4") pod "faddc91f-ad86-4122-9e6c-72fd3d9abf3c" (UID: "faddc91f-ad86-4122-9e6c-72fd3d9abf3c"). InnerVolumeSpecName "kube-api-access-wjbf4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.454831 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjbf4\" (UniqueName: \"kubernetes.io/projected/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-kube-api-access-wjbf4\") on node \"crc\" DevicePath \"\"" Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.506328 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:05:52 crc kubenswrapper[4712]: E0131 07:05:52.506549 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.541102 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "faddc91f-ad86-4122-9e6c-72fd3d9abf3c" (UID: "faddc91f-ad86-4122-9e6c-72fd3d9abf3c"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.557248 4712 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/faddc91f-ad86-4122-9e6c-72fd3d9abf3c-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.931220 4712 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5mfnn_must-gather-s58sh_faddc91f-ad86-4122-9e6c-72fd3d9abf3c/copy/0.log" Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.931667 4712 generic.go:334] "Generic (PLEG): container finished" podID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerID="b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d" exitCode=143 Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.931735 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.931730 4712 scope.go:117] "RemoveContainer" containerID="b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d"
Jan 31 07:05:52 crc kubenswrapper[4712]: I0131 07:05:52.975347 4712 scope.go:117] "RemoveContainer" containerID="a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0"
Jan 31 07:05:53 crc kubenswrapper[4712]: I0131 07:05:53.082698 4712 scope.go:117] "RemoveContainer" containerID="b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d"
Jan 31 07:05:53 crc kubenswrapper[4712]: E0131 07:05:53.083092 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d\": container with ID starting with b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d not found: ID does not exist" containerID="b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d"
Jan 31 07:05:53 crc kubenswrapper[4712]: I0131 07:05:53.083135 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d"} err="failed to get container status \"b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d\": rpc error: code = NotFound desc = could not find container \"b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d\": container with ID starting with b141586d59828a6c27088b804c8f17d6e86e5770afa7a14e379a7ff805fab37d not found: ID does not exist"
Jan 31 07:05:53 crc kubenswrapper[4712]: I0131 07:05:53.083164 4712 scope.go:117] "RemoveContainer" containerID="a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0"
Jan 31 07:05:53 crc kubenswrapper[4712]: E0131 07:05:53.083748 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0\": container with ID starting with a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0 not found: ID does not exist" containerID="a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0"
Jan 31 07:05:53 crc kubenswrapper[4712]: I0131 07:05:53.083776 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0"} err="failed to get container status \"a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0\": rpc error: code = NotFound desc = could not find container \"a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0\": container with ID starting with a75cf9aada7177ba4cd5ec23c3de57eca3e34a5c814ea9b9617e8070cc2679b0 not found: ID does not exist"
Jan 31 07:05:54 crc kubenswrapper[4712]: I0131 07:05:54.534113 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" path="/var/lib/kubelet/pods/faddc91f-ad86-4122-9e6c-72fd3d9abf3c/volumes"
Jan 31 07:06:05 crc kubenswrapper[4712]: I0131 07:06:05.503966 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"
Jan 31 07:06:05 crc kubenswrapper[4712]: E0131 07:06:05.504887 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
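
The "ContainerStatus from runtime service failed" / "DeleteContainer returned error" pairs above are benign: the kubelet's garbage collection races with CRI-O's own cleanup, so by the time the status lookup runs, the container is already gone and the RPC returns gRPC NotFound. Treating NotFound as "already deleted" keeps removal idempotent. A hedged sketch (removeIfPresent and the remove callback are illustrative names, not kubelet's API):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeIfPresent: remove(id) stands in for a CRI RemoveContainer RPC.
// A gRPC NotFound (as in the "could not find container" errors above)
// means the runtime already deleted it, so it is treated as success.
func removeIfPresent(remove func(id string) error, id string) error {
	if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
		return err
	}
	return nil
}

func main() {
	fake := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	fmt.Println(removeIfPresent(fake, "b141586d")) // <nil>: NotFound tolerated
}
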
Jan 31 07:06:17 crc kubenswrapper[4712]: I0131 07:06:17.504533 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"
Jan 31 07:06:17 crc kubenswrapper[4712]: E0131 07:06:17.506419 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 07:06:23 crc kubenswrapper[4712]: I0131 07:06:23.140153 4712 scope.go:117] "RemoveContainer" containerID="684627c9188afc2926866a66ddf29d4abad8fbd46e99062b2670ada1a215b32a"
Jan 31 07:06:32 crc kubenswrapper[4712]: I0131 07:06:32.504827 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"
Jan 31 07:06:32 crc kubenswrapper[4712]: E0131 07:06:32.505678 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 07:06:47 crc kubenswrapper[4712]: I0131 07:06:47.505424 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"
Jan 31 07:06:47 crc kubenswrapper[4712]: E0131 07:06:47.507371 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 07:07:02 crc kubenswrapper[4712]: I0131 07:07:02.504321 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"
Jan 31 07:07:02 crc kubenswrapper[4712]: E0131 07:07:02.505409 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 07:07:14 crc kubenswrapper[4712]: I0131 07:07:14.510593 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"
Jan 31 07:07:14 crc kubenswrapper[4712]: E0131 07:07:14.511350 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
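
The repeating "back-off 5m0s" errors show the restart back-off already sitting at its ceiling; each sync attempt is refused until the window expires (it finally does at 07:10:18 below, when the container restarts). A sketch of the capped doubling this implies, assuming the usual kubelet defaults of a 10s initial delay and a 5m cap (this is not kubelet's code, just the shape of the policy):

package main

import (
	"fmt"
	"time"
)

// backoff: the delay starts at `initial` and doubles per crash,
// clamped to `max`; after enough restarts every retry reports the cap,
// which is why each record above says "back-off 5m0s".
func backoff(restarts int, initial, max time.Duration) time.Duration {
	d := initial
	for i := 0; i < restarts && d < max; i++ {
		d *= 2
	}
	if d > max {
		d = max
	}
	return d
}

func main() {
	for n := 0; n <= 6; n++ {
		fmt.Println(n, backoff(n, 10*time.Second, 5*time.Minute))
	}
	// 0 10s, 1 20s, 2 40s, 3 1m20s, 4 2m40s, 5 5m0s, 6 5m0s
}
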
Jan 31 07:07:27 crc kubenswrapper[4712]: I0131 07:07:27.504004 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2"
Jan 31 07:07:27 crc kubenswrapper[4712]: E0131 07:07:27.505615 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847"
Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.167073 4712 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q8sfs"]
Jan 31 07:07:40 crc kubenswrapper[4712]: E0131 07:07:40.168529 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerName="registry-server"
Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.168550 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerName="registry-server"
Jan 31 07:07:40 crc kubenswrapper[4712]: E0131 07:07:40.168567 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerName="gather"
Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.168575 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerName="gather"
Jan 31 07:07:40 crc kubenswrapper[4712]: E0131 07:07:40.168585 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerName="extract-utilities"
Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.168593 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerName="extract-utilities"
Jan 31 07:07:40 crc kubenswrapper[4712]: E0131 07:07:40.168622 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerName="copy"
Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.168629 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerName="copy"
Jan 31 07:07:40 crc kubenswrapper[4712]: E0131 07:07:40.168649 4712 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerName="extract-content"
Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.168657 4712 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerName="extract-content"
Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.168863 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fc865d5-a3ff-4905-90e4-21ec5f425e0b" containerName="registry-server"
Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.168873 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerName="copy"
Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.168890 4712 memory_manager.go:354] "RemoveStaleState removing state" podUID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerName="gather"
removing state" podUID="faddc91f-ad86-4122-9e6c-72fd3d9abf3c" containerName="gather" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.170526 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.187573 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q8sfs"] Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.290483 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-utilities\") pod \"certified-operators-q8sfs\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") " pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.290652 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-catalog-content\") pod \"certified-operators-q8sfs\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") " pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.290886 4712 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhn2m\" (UniqueName: \"kubernetes.io/projected/41e7e323-a1be-42e4-bfd6-88144d68368d-kube-api-access-nhn2m\") pod \"certified-operators-q8sfs\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") " pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.393255 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-utilities\") pod \"certified-operators-q8sfs\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") " pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.393648 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-catalog-content\") pod \"certified-operators-q8sfs\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") " pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.393763 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-utilities\") pod \"certified-operators-q8sfs\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") " pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.393859 4712 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhn2m\" (UniqueName: \"kubernetes.io/projected/41e7e323-a1be-42e4-bfd6-88144d68368d-kube-api-access-nhn2m\") pod \"certified-operators-q8sfs\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") " pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.393979 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-catalog-content\") pod \"certified-operators-q8sfs\" 
(UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") " pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.414237 4712 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhn2m\" (UniqueName: \"kubernetes.io/projected/41e7e323-a1be-42e4-bfd6-88144d68368d-kube-api-access-nhn2m\") pod \"certified-operators-q8sfs\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") " pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.488843 4712 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:40 crc kubenswrapper[4712]: I0131 07:07:40.504756 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:07:40 crc kubenswrapper[4712]: E0131 07:07:40.505040 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:07:41 crc kubenswrapper[4712]: W0131 07:07:41.054772 4712 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41e7e323_a1be_42e4_bfd6_88144d68368d.slice/crio-f145a2e6fddc5313fa080c9401b37d4f564b9011af9d0fed2217defbcbd87a18 WatchSource:0}: Error finding container f145a2e6fddc5313fa080c9401b37d4f564b9011af9d0fed2217defbcbd87a18: Status 404 returned error can't find the container with id f145a2e6fddc5313fa080c9401b37d4f564b9011af9d0fed2217defbcbd87a18 Jan 31 07:07:41 crc kubenswrapper[4712]: I0131 07:07:41.055041 4712 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q8sfs"] Jan 31 07:07:41 crc kubenswrapper[4712]: I0131 07:07:41.918129 4712 generic.go:334] "Generic (PLEG): container finished" podID="41e7e323-a1be-42e4-bfd6-88144d68368d" containerID="973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42" exitCode=0 Jan 31 07:07:41 crc kubenswrapper[4712]: I0131 07:07:41.918197 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8sfs" event={"ID":"41e7e323-a1be-42e4-bfd6-88144d68368d","Type":"ContainerDied","Data":"973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42"} Jan 31 07:07:41 crc kubenswrapper[4712]: I0131 07:07:41.918498 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8sfs" event={"ID":"41e7e323-a1be-42e4-bfd6-88144d68368d","Type":"ContainerStarted","Data":"f145a2e6fddc5313fa080c9401b37d4f564b9011af9d0fed2217defbcbd87a18"} Jan 31 07:07:41 crc kubenswrapper[4712]: I0131 07:07:41.920024 4712 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 31 07:07:42 crc kubenswrapper[4712]: I0131 07:07:42.930610 4712 generic.go:334] "Generic (PLEG): container finished" podID="41e7e323-a1be-42e4-bfd6-88144d68368d" containerID="4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3" exitCode=0 Jan 31 07:07:42 crc kubenswrapper[4712]: I0131 07:07:42.930718 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-q8sfs" event={"ID":"41e7e323-a1be-42e4-bfd6-88144d68368d","Type":"ContainerDied","Data":"4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3"} Jan 31 07:07:43 crc kubenswrapper[4712]: I0131 07:07:43.941816 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8sfs" event={"ID":"41e7e323-a1be-42e4-bfd6-88144d68368d","Type":"ContainerStarted","Data":"7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d"} Jan 31 07:07:43 crc kubenswrapper[4712]: I0131 07:07:43.959459 4712 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q8sfs" podStartSLOduration=2.549263914 podStartE2EDuration="3.959437197s" podCreationTimestamp="2026-01-31 07:07:40 +0000 UTC" firstStartedPulling="2026-01-31 07:07:41.919801913 +0000 UTC m=+5328.013683754" lastFinishedPulling="2026-01-31 07:07:43.329975196 +0000 UTC m=+5329.423857037" observedRunningTime="2026-01-31 07:07:43.958242158 +0000 UTC m=+5330.052123999" watchObservedRunningTime="2026-01-31 07:07:43.959437197 +0000 UTC m=+5330.053319038" Jan 31 07:07:50 crc kubenswrapper[4712]: I0131 07:07:50.489287 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:50 crc kubenswrapper[4712]: I0131 07:07:50.489825 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:50 crc kubenswrapper[4712]: I0131 07:07:50.540654 4712 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:51 crc kubenswrapper[4712]: I0131 07:07:51.049068 4712 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q8sfs" Jan 31 07:07:51 crc kubenswrapper[4712]: I0131 07:07:51.099600 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q8sfs"] Jan 31 07:07:53 crc kubenswrapper[4712]: I0131 07:07:53.015050 4712 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q8sfs" podUID="41e7e323-a1be-42e4-bfd6-88144d68368d" containerName="registry-server" containerID="cri-o://7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d" gracePeriod=2 Jan 31 07:07:53 crc kubenswrapper[4712]: I0131 07:07:53.430186 4712 util.go:48] "No ready sandbox for pod can be found. 
Jan 31 07:07:53 crc kubenswrapper[4712]: I0131 07:07:53.464781 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhn2m\" (UniqueName: \"kubernetes.io/projected/41e7e323-a1be-42e4-bfd6-88144d68368d-kube-api-access-nhn2m\") pod \"41e7e323-a1be-42e4-bfd6-88144d68368d\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") "
Jan 31 07:07:53 crc kubenswrapper[4712]: I0131 07:07:53.464953 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-utilities\") pod \"41e7e323-a1be-42e4-bfd6-88144d68368d\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") "
Jan 31 07:07:53 crc kubenswrapper[4712]: I0131 07:07:53.465015 4712 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-catalog-content\") pod \"41e7e323-a1be-42e4-bfd6-88144d68368d\" (UID: \"41e7e323-a1be-42e4-bfd6-88144d68368d\") "
Jan 31 07:07:53 crc kubenswrapper[4712]: I0131 07:07:53.466902 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-utilities" (OuterVolumeSpecName: "utilities") pod "41e7e323-a1be-42e4-bfd6-88144d68368d" (UID: "41e7e323-a1be-42e4-bfd6-88144d68368d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 31 07:07:53 crc kubenswrapper[4712]: I0131 07:07:53.471310 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41e7e323-a1be-42e4-bfd6-88144d68368d-kube-api-access-nhn2m" (OuterVolumeSpecName: "kube-api-access-nhn2m") pod "41e7e323-a1be-42e4-bfd6-88144d68368d" (UID: "41e7e323-a1be-42e4-bfd6-88144d68368d"). InnerVolumeSpecName "kube-api-access-nhn2m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 31 07:07:53 crc kubenswrapper[4712]: I0131 07:07:53.567812 4712 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-utilities\") on node \"crc\" DevicePath \"\""
Jan 31 07:07:53 crc kubenswrapper[4712]: I0131 07:07:53.567851 4712 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhn2m\" (UniqueName: \"kubernetes.io/projected/41e7e323-a1be-42e4-bfd6-88144d68368d-kube-api-access-nhn2m\") on node \"crc\" DevicePath \"\""
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.027882 4712 generic.go:334] "Generic (PLEG): container finished" podID="41e7e323-a1be-42e4-bfd6-88144d68368d" containerID="7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d" exitCode=0
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.027947 4712 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q8sfs"
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.027985 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8sfs" event={"ID":"41e7e323-a1be-42e4-bfd6-88144d68368d","Type":"ContainerDied","Data":"7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d"}
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.028455 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8sfs" event={"ID":"41e7e323-a1be-42e4-bfd6-88144d68368d","Type":"ContainerDied","Data":"f145a2e6fddc5313fa080c9401b37d4f564b9011af9d0fed2217defbcbd87a18"}
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.028473 4712 scope.go:117] "RemoveContainer" containerID="7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d"
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.051244 4712 scope.go:117] "RemoveContainer" containerID="4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3"
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.083847 4712 scope.go:117] "RemoveContainer" containerID="973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42"
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.122593 4712 scope.go:117] "RemoveContainer" containerID="7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d"
Jan 31 07:07:54 crc kubenswrapper[4712]: E0131 07:07:54.123085 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d\": container with ID starting with 7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d not found: ID does not exist" containerID="7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d"
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.123140 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d"} err="failed to get container status \"7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d\": rpc error: code = NotFound desc = could not find container \"7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d\": container with ID starting with 7f64c841d19ea6bd2bc617ed6993bdae085b62ec879e84af7673ff8acd9a3a7d not found: ID does not exist"
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.123201 4712 scope.go:117] "RemoveContainer" containerID="4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3"
Jan 31 07:07:54 crc kubenswrapper[4712]: E0131 07:07:54.123610 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3\": container with ID starting with 4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3 not found: ID does not exist" containerID="4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3"
Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.123652 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3"} err="failed to get container status \"4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3\": rpc error: code = NotFound desc = could not find container \"4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3\": container with ID starting with 4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3 not found: ID does not exist"
\"4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3\": container with ID starting with 4c8e4b2d1391a27ef5680e968064c09a97aa21c6d7b8b70242fb33983c3179f3 not found: ID does not exist" Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.123677 4712 scope.go:117] "RemoveContainer" containerID="973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42" Jan 31 07:07:54 crc kubenswrapper[4712]: E0131 07:07:54.124079 4712 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42\": container with ID starting with 973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42 not found: ID does not exist" containerID="973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42" Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.124117 4712 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42"} err="failed to get container status \"973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42\": rpc error: code = NotFound desc = could not find container \"973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42\": container with ID starting with 973709fdacdaa9527cec91b08eafc759de5f062c1aa1eac849ff33f207ba0c42 not found: ID does not exist" Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.219695 4712 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41e7e323-a1be-42e4-bfd6-88144d68368d" (UID: "41e7e323-a1be-42e4-bfd6-88144d68368d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.280620 4712 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41e7e323-a1be-42e4-bfd6-88144d68368d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.363688 4712 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q8sfs"] Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.372326 4712 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q8sfs"] Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.514114 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:07:54 crc kubenswrapper[4712]: I0131 07:07:54.514402 4712 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41e7e323-a1be-42e4-bfd6-88144d68368d" path="/var/lib/kubelet/pods/41e7e323-a1be-42e4-bfd6-88144d68368d/volumes" Jan 31 07:07:54 crc kubenswrapper[4712]: E0131 07:07:54.514491 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:08:07 crc kubenswrapper[4712]: I0131 07:08:07.505519 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:08:07 crc kubenswrapper[4712]: E0131 07:08:07.506862 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:08:20 crc kubenswrapper[4712]: I0131 07:08:20.505123 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:08:20 crc kubenswrapper[4712]: E0131 07:08:20.506094 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:08:31 crc kubenswrapper[4712]: I0131 07:08:31.504434 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:08:31 crc kubenswrapper[4712]: E0131 07:08:31.505273 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:08:43 crc kubenswrapper[4712]: I0131 07:08:43.503641 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:08:43 crc kubenswrapper[4712]: E0131 07:08:43.505744 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:08:56 crc kubenswrapper[4712]: I0131 07:08:56.503699 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:08:56 crc kubenswrapper[4712]: E0131 07:08:56.504435 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:09:10 crc kubenswrapper[4712]: I0131 07:09:10.505445 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:09:10 crc kubenswrapper[4712]: E0131 07:09:10.506441 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:09:24 crc kubenswrapper[4712]: I0131 07:09:24.504324 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:09:24 crc kubenswrapper[4712]: E0131 07:09:24.505216 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:09:39 crc kubenswrapper[4712]: I0131 07:09:39.504949 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:09:39 crc kubenswrapper[4712]: E0131 07:09:39.505992 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:09:51 crc kubenswrapper[4712]: I0131 07:09:51.504714 4712 
scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:09:51 crc kubenswrapper[4712]: E0131 07:09:51.505459 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:10:05 crc kubenswrapper[4712]: I0131 07:10:05.505333 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:10:05 crc kubenswrapper[4712]: E0131 07:10:05.506114 4712 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6hwmd_openshift-machine-config-operator(eaac0246-673f-4670-8b7b-c27ecaf0d847)\"" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" podUID="eaac0246-673f-4670-8b7b-c27ecaf0d847" Jan 31 07:10:18 crc kubenswrapper[4712]: I0131 07:10:18.505918 4712 scope.go:117] "RemoveContainer" containerID="247674e7f74ef7aa8c2536b4ae57045c0e78f590dee74e3e4075c123414d5ee2" Jan 31 07:10:19 crc kubenswrapper[4712]: I0131 07:10:19.530298 4712 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6hwmd" event={"ID":"eaac0246-673f-4670-8b7b-c27ecaf0d847","Type":"ContainerStarted","Data":"2031f4b3edce62d5b615defc55ede13c1e5de87b08a84599854fd17e9e311e61"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515137325404024451 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015137325405017367 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015137312252016506 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015137312252015456 5ustar corecore